[ 493.497559] env[62109]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62109) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 493.497923] env[62109]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62109) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 493.498076] env[62109]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62109) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 493.498341] env[62109]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 493.594276] env[62109]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62109) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 493.604658] env[62109]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62109) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 494.202932] env[62109]: INFO nova.virt.driver [None req-1109496c-e147-4cde-b356-e2a246fe087b None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 494.272492] env[62109]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 494.272655] env[62109]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 494.272777] env[62109]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62109) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 497.307577] env[62109]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-cfbc91a8-7eab-427f-a722-9933feadce90 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.331503] env[62109]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62109) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 497.331744] env[62109]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-69917446-7221-40f5-aa5b-cec3f0f3bee6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.366449] env[62109]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 2dab7.
[ 497.366723] env[62109]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.094s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 497.367167] env[62109]: INFO nova.virt.vmwareapi.driver [None req-1109496c-e147-4cde-b356-e2a246fe087b None None] VMware vCenter version: 7.0.3
[ 497.371482] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da55c17-c326-4cc6-9d1e-2a4ae77ed374 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.389908] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8f54db-85b2-497d-a551-20d172e36cfb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.396370] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92547d4-e09f-4c91-990d-6b26eccbe880 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.403246] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29938cd-14ad-4394-baab-7ff4572a4c6b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.416931] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4742b9-cdf2-43ae-bfed-d91bc1046b1c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.423451] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af30e61c-a9dd-467f-b77f-92ff87fb4115 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.454249] env[62109]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-51f784d4-dfe7-479b-8663-2a84fe05d0d9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.460199] env[62109]: DEBUG nova.virt.vmwareapi.driver [None req-1109496c-e147-4cde-b356-e2a246fe087b None None] Extension org.openstack.compute already exists. {{(pid=62109) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:225}}
[ 497.462956] env[62109]: INFO nova.compute.provider_config [None req-1109496c-e147-4cde-b356-e2a246fe087b None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 497.966335] env[62109]: DEBUG nova.context [None req-1109496c-e147-4cde-b356-e2a246fe087b None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),e6559971-65f4-46b5-b64b-dd91043dd2b0(cell1) {{(pid=62109) load_cells /opt/stack/nova/nova/context.py:464}}
[ 497.968543] env[62109]: DEBUG oslo_concurrency.lockutils [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 497.968704] env[62109]: DEBUG oslo_concurrency.lockutils [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 497.969307] env[62109]: DEBUG oslo_concurrency.lockutils [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 497.969785] env[62109]: DEBUG oslo_concurrency.lockutils [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Acquiring lock "e6559971-65f4-46b5-b64b-dd91043dd2b0" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 497.969994] env[62109]: DEBUG oslo_concurrency.lockutils [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Lock "e6559971-65f4-46b5-b64b-dd91043dd2b0" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 497.971022] env[62109]: DEBUG oslo_concurrency.lockutils [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Lock "e6559971-65f4-46b5-b64b-dd91043dd2b0" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 497.992260] env[62109]: INFO dbcounter [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Registered counter for database nova_cell0
[ 498.001166] env[62109]: INFO dbcounter [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Registered counter for database nova_cell1
[ 498.004953] env[62109]: DEBUG oslo_db.sqlalchemy.engines [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62109) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 498.005325] env[62109]: DEBUG oslo_db.sqlalchemy.engines [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62109) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 498.010419] env[62109]: ERROR nova.db.main.api [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 498.010419] env[62109]:     result = function(*args, **kwargs)
[ 498.010419] env[62109]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 498.010419] env[62109]:     return func(*args, **kwargs)
[ 498.010419] env[62109]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 498.010419] env[62109]:     result = fn(*args, **kwargs)
[ 498.010419] env[62109]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 498.010419] env[62109]:     return f(*args, **kwargs)
[ 498.010419] env[62109]:   File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 498.010419] env[62109]:     return db.service_get_minimum_version(context, binaries)
[ 498.010419] env[62109]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 498.010419] env[62109]:     _check_db_access()
[ 498.010419] env[62109]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 498.010419] env[62109]:     stacktrace = ''.join(traceback.format_stack())
[ 498.010419] env[62109]:
[ 498.011848] env[62109]: ERROR nova.db.main.api [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 498.011848] env[62109]:     result = function(*args, **kwargs)
[ 498.011848] env[62109]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 498.011848] env[62109]:     return func(*args, **kwargs)
[ 498.011848] env[62109]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 498.011848] env[62109]:     result = fn(*args, **kwargs)
[ 498.011848] env[62109]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 498.011848] env[62109]:     return f(*args, **kwargs)
[ 498.011848] env[62109]:   File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 498.011848] env[62109]:     return db.service_get_minimum_version(context, binaries)
[ 498.011848] env[62109]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 498.011848] env[62109]:     _check_db_access()
[ 498.011848] env[62109]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 498.011848] env[62109]:     stacktrace = ''.join(traceback.format_stack())
[ 498.011848] env[62109]:
[ 498.012334] env[62109]: WARNING nova.objects.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 498.012462] env[62109]: WARNING nova.objects.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Failed to get minimum service version for cell e6559971-65f4-46b5-b64b-dd91043dd2b0
[ 498.012848] env[62109]: DEBUG oslo_concurrency.lockutils [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Acquiring lock "singleton_lock" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 498.013017] env[62109]: DEBUG oslo_concurrency.lockutils [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Acquired lock "singleton_lock" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
498.013289] env[62109]: DEBUG oslo_concurrency.lockutils [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Releasing lock "singleton_lock" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 498.013619] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Full set of CONF: {{(pid=62109) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 498.013767] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ******************************************************************************** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 498.013896] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Configuration options gathered from: {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 498.014047] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}} [ 498.014246] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 498.014377] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ================================================================================ {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}} [ 498.014614] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] allow_resize_to_same_host = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.014796] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] arq_binding_timeout = 300 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.014936] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] backdoor_port = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.015072] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] backdoor_socket = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.015240] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] block_device_allocate_retries = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.015401] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] block_device_allocate_retries_interval = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.015570] env[62109]: DEBUG 
oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cert = self.pem {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.015738] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.015905] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute_monitors = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.016085] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] config_dir = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.016258] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] config_drive_format = iso9660 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.016394] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.016563] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] config_source = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.016731] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] console_host = devstack {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.016896] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] control_exchange = nova {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.017075] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cpu_allocation_ratio = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.017241] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] daemon = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.017410] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] debug = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.017590] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] default_access_ip_network_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.017772] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] default_availability_zone = nova {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.017933] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] default_ephemeral_format = 
None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.018104] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] default_green_pool_size = 1000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.018343] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.018511] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] default_schedule_zone = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.018669] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] disk_allocation_ratio = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.018829] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] enable_new_services = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.019015] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] enabled_apis = ['osapi_compute'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.019190] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] enabled_ssl_apis = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.019351] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] flat_injected = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.019537] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] force_config_drive = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.019717] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] force_raw_images = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.019892] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] graceful_shutdown_timeout = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.020067] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] heal_instance_info_cache_interval = 60 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.020298] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] host = cpu-1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.020479] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.020646] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] initial_disk_allocation_ratio = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.020809] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] initial_ram_allocation_ratio = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.021040] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.021214] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] instance_build_timeout = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.021376] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] instance_delete_interval = 300 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.021582] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] instance_format = [instance: %(uuid)s] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.021753] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] instance_name_template = instance-%08x {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.021917] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] instance_usage_audit = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.022101] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] instance_usage_audit_period = month {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.022272] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.022438] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] instances_path = /opt/stack/data/nova/instances {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.022648] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] internal_service_availability_zone = internal {{(pid=62109) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.022817] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] key = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.022980] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] live_migration_retry_count = 30 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.023166] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] log_color = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.023330] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] log_config_append = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.023497] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.023656] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] log_dir = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.023812] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] log_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.023937] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] log_options = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.024107] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] log_rotate_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.024282] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] log_rotate_interval_type = days {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.024449] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] log_rotation_type = none {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.024589] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.024717] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.024886] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.025060] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.025192] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.025358] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] long_rpc_timeout = 1800 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.025525] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] max_concurrent_builds = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.025706] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] max_concurrent_live_migrations = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.025866] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] max_concurrent_snapshots = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.026034] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] max_local_block_devices = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.026196] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] max_logfile_count = 30 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.026352] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] max_logfile_size_mb = 200 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.026510] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] maximum_instance_delete_attempts = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.026684] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] metadata_listen = 0.0.0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.026846] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] metadata_listen_port = 8775 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.027013] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] metadata_workers = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.027186] env[62109]: DEBUG oslo_service.service 
[None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] migrate_max_retries = -1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.027352] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] mkisofs_cmd = genisoimage {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.027562] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] my_block_storage_ip = 10.180.1.21 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.027693] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] my_ip = 10.180.1.21 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.027855] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] network_allocate_retries = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.028085] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.028259] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] osapi_compute_listen = 0.0.0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.028427] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] osapi_compute_listen_port = 8774 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.028616] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] osapi_compute_unique_server_name_scope = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.028801] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] osapi_compute_workers = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.028968] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] password_length = 12 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.029145] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] periodic_enable = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.029309] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] periodic_fuzzy_delay = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.029487] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] pointer_model = usbtablet {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.029651] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] preallocate_images = none {{(pid=62109) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.029812] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] publish_errors = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.029946] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] pybasedir = /opt/stack/nova {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.030120] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ram_allocation_ratio = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.030283] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] rate_limit_burst = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.030452] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] rate_limit_except_level = CRITICAL {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.030614] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] rate_limit_interval = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.030771] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] reboot_timeout = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.030929] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] reclaim_instance_interval = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.031096] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] record = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.031265] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] reimage_timeout_per_gb = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.031479] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] report_interval = 120 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.031651] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] rescue_timeout = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.031823] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] reserved_host_cpus = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.031983] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] reserved_host_disk_mb = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.032158] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c 
None None] reserved_host_memory_mb = 512 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.032321] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] reserved_huge_pages = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.032487] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] resize_confirm_window = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.032677] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] resize_fs_using_block_device = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.032844] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] resume_guests_state_on_host_boot = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.033021] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.033188] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] rpc_response_timeout = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.033350] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] run_external_periodic_tasks = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.033519] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] running_deleted_instance_action = reap {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.033709] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] running_deleted_instance_poll_interval = 1800 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.033885] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] running_deleted_instance_timeout = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.034058] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] scheduler_instance_sync_interval = 120 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.034232] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] service_down_time = 720 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.034400] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] servicegroup_driver = db {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.034558] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] shell_completion = None {{(pid=62109) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.034721] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] shelved_offload_time = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.034879] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] shelved_poll_interval = 3600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.035056] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] shutdown_timeout = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.035221] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] source_is_ipv6 = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.035382] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ssl_only = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.035638] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.035808] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] sync_power_state_interval = 600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.035969] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] sync_power_state_pool_size = 1000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.036151] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] syslog_log_facility = LOG_USER {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.036312] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] tempdir = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.036475] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] timeout_nbd = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.036645] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] transport_url = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.036807] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] update_resources_interval = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.036968] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] use_cow_images = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.037140] env[62109]: DEBUG oslo_service.service [None 
req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] use_eventlog = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.037300] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] use_journal = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.037460] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] use_json = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.037617] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] use_rootwrap_daemon = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.037775] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] use_stderr = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.037931] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] use_syslog = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.038099] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vcpu_pin_set = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.038269] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plugging_is_fatal = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.038437] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plugging_timeout = 300 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.038605] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] virt_mkfs = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.038768] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] volume_usage_poll_interval = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.038931] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] watch_log_file = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.039112] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] web = /usr/share/spice-html5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 498.039301] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.039478] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.039651] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.039823] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_concurrency.disable_process_locking = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.040479] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.040678] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.040852] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.041087] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.041301] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.041505] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.041700] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.auth_strategy = keystone {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.041876] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.compute_link_prefix = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.042070] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.042255] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.dhcp_domain = novalocal {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.042459] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.enable_instance_password = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.042632] 
env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.glance_link_prefix = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.042806] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.043027] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.043212] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.instance_list_per_project_cells = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.043380] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.list_records_by_skipping_down_cells = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.043546] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.local_metadata_per_cell = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.043721] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.max_limit = 1000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.043890] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.metadata_cache_expiration = 15 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.044079] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.neutron_default_tenant_id = default {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.044257] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.response_validation = warn {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.044430] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.use_neutron_default_nets = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.044603] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.044780] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.044951] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.045139] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.045318] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.vendordata_dynamic_targets = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.045491] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.vendordata_jsonfile_path = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.045670] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.045868] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.backend = dogpile.cache.memcached {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.046051] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.backend_argument = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.046227] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.config_prefix = cache.oslo {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.046400] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.dead_timeout = 60.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.046564] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.debug_cache_backend = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.046726] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.enable_retry_client = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.046887] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.enable_socket_keepalive = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.047070] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.enabled = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.047235] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.enforce_fips_mode = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.047398] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.expiration_time = 600 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.047561] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.hashclient_retry_attempts = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.047730] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.hashclient_retry_delay = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.047895] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.memcache_dead_retry = 300 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.048068] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.memcache_password = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.048235] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.048400] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.048562] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.memcache_pool_maxsize = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.048726] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.048888] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.memcache_sasl_enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.049078] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.049249] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.memcache_socket_timeout = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.049446] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.memcache_username = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.049579] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.proxies = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.049747] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.redis_db = 0 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.049905] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.redis_password = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.050088] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.redis_sentinel_service_name = mymaster {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.050268] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.050439] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.redis_server = localhost:6379 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.050631] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.redis_socket_timeout = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.050805] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.redis_username = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.050968] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.retry_attempts = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.051151] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.retry_delay = 0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.051318] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.socket_keepalive_count = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.051515] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.socket_keepalive_idle = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.051684] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.socket_keepalive_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.051845] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.tls_allowed_ciphers = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.052030] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.tls_cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.052244] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.tls_certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
498.052446] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.tls_enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.052624] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cache.tls_keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.052798] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.052978] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.auth_type = password {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.053160] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.053342] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.catalog_info = volumev3::publicURL {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.053504] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.053669] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.053833] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.cross_az_attach = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.053998] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.debug = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.054229] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.endpoint_template = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.054410] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.http_retries = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.054610] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.054780] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.054955] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.os_region_name = RegionOne 
{{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.055158] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.055332] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cinder.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.055509] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.055673] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute.cpu_dedicated_set = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.055833] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute.cpu_shared_set = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.055999] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute.image_type_exclude_list = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.056182] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.056347] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute.max_concurrent_disk_ops = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.056510] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute.max_disk_devices_to_attach = -1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.056674] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.056845] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.057015] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute.resource_provider_association_refresh = 300 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.057185] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.057348] env[62109]: DEBUG oslo_service.service [None 
req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute.shutdown_retry_interval = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.057531] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.057714] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] conductor.workers = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.057895] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] console.allowed_origins = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.058068] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] console.ssl_ciphers = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.058242] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] console.ssl_minimum_version = default {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.058413] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] consoleauth.enforce_session_timeout = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.058665] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] consoleauth.token_ttl = 600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.058755] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.058912] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.059088] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.059253] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.connect_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.059418] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.connect_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.059574] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.059739] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] 
cyborg.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.059897] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.060073] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.060235] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.060395] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.region_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.060590] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.060759] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.060933] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.service_type = accelerator {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.061124] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.061315] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.061520] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.061691] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.061877] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.062054] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] cyborg.version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.062239] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.backend = sqlalchemy {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.062443] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.connection = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.062620] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.connection_debug = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.062801] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.connection_parameters = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.062969] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.connection_recycle_time = 3600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.063150] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.connection_trace = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.063312] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.db_inc_retry_interval = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.063476] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.db_max_retries = 20 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.063648] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.db_max_retry_interval = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.063825] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.db_retry_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.064011] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.max_overflow = 50 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.064359] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.max_pool_size = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.064359] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.max_retries = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.064504] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.064664] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.mysql_wsrep_sync_wait = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
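Every entry in this dump comes from oslo.config's ConfigOpts.log_opt_values() (the cfg.py:2826 frame cited in each line), which walks the registered option groups at service start and logs one "group.option = value" line per option, printing '****' for options declared secret — which is why database.connection, cache.memcache_password and similar values appear masked. A minimal sketch of that mechanism follows; the option subset and defaults are assumptions for illustration, not Nova's real [database] option definitions.

import logging
from oslo_config import cfg

# Illustrative subset only; Nova registers its actual [database] options elsewhere.
CONF = cfg.ConfigOpts()
CONF.register_opts(
    [
        cfg.StrOpt('connection', secret=True),   # secret=True is why the dump shows '****'
        cfg.IntOpt('max_pool_size', default=5),
        cfg.IntOpt('max_overflow', default=50),
    ],
    group='database',
)

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger('demo')

CONF([], project='demo')                    # parse (empty) CLI/config-file sources
CONF.log_opt_values(LOG, logging.DEBUG)     # emits one DEBUG line per registered option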
[ 498.064820] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.pool_timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.065062] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.retry_interval = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.065241] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.slave_connection = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.065405] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.sqlite_synchronous = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.065567] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] database.use_db_reconnect = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.065746] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.backend = sqlalchemy {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.065915] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.connection = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.066090] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.connection_debug = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.066264] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.connection_parameters = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.066431] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.connection_recycle_time = 3600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.066644] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.connection_trace = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.066826] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.db_inc_retry_interval = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.066991] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.db_max_retries = 20 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.067171] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.db_max_retry_interval = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.067332] env[62109]: DEBUG 
oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.db_retry_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.067503] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.max_overflow = 50 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.067658] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.max_pool_size = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.067817] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.max_retries = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.067986] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.068162] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.068321] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.pool_timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.068481] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.retry_interval = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.068637] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.slave_connection = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.068797] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] api_database.sqlite_synchronous = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.068973] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] devices.enabled_mdev_types = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.069164] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.069336] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ephemeral_storage_encryption.default_format = luks {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.069503] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ephemeral_storage_encryption.enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.069666] env[62109]: 
DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.069837] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.api_servers = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.070006] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.070180] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.070344] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.070503] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.connect_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.070663] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.connect_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.070824] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.debug = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.070990] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.default_trusted_certificate_ids = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.071167] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.enable_certificate_validation = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.071330] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.enable_rbd_download = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.071517] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.071689] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.071854] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.072021] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] 
glance.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.072181] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.072347] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.num_retries = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.072539] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.rbd_ceph_conf = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.072711] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.rbd_connect_timeout = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.072880] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.rbd_pool = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.073059] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.rbd_user = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.073224] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.region_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.073385] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.073544] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.073713] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.service_type = image {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.073875] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.074052] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.074214] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.074373] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.074576] 
env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.074753] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.verify_glance_signatures = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.074913] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] glance.version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.075093] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] guestfs.debug = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.075263] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] mks.enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.075636] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.075830] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] image_cache.manager_interval = 2400 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.076018] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] image_cache.precache_concurrency = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.076196] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] image_cache.remove_unused_base_images = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.076369] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.076551] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.076741] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] image_cache.subdirectory_name = _base {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.076918] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.api_max_retries = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.077097] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.api_retry_interval = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
498.077262] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.077425] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.auth_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.077584] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.077740] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.077979] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.078164] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.conductor_group = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.078328] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.connect_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.078487] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.connect_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.078669] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.078840] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.078999] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.079173] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.079359] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.079512] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.peer_list = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.079671] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.region_name = None {{(pid=62109) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.079828] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.079990] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.serial_console_state_timeout = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.080163] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.080331] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.service_type = baremetal {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.080491] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.shard = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.080652] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.080807] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.080964] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.081133] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.081311] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.081490] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ironic.version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.081681] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.081853] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] key_manager.fixed_key = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.082044] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.082212] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.barbican_api_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.082371] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.barbican_endpoint = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.082565] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.barbican_endpoint_type = public {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.082733] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.barbican_region_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.082894] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.083064] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.083230] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.083390] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.083546] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.083709] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.number_of_retries = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.083869] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.retry_delay = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.084041] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.send_service_user_token = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.084205] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.084360] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.084519] 
env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.verify_ssl = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.084676] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican.verify_ssl_path = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.084842] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican_service_user.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.085007] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican_service_user.auth_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.085172] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican_service_user.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.085327] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican_service_user.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.085489] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican_service_user.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.085648] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican_service_user.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.085803] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican_service_user.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.085962] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican_service_user.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.086132] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] barbican_service_user.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.086297] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vault.approle_role_id = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.086456] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vault.approle_secret_id = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.086654] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vault.kv_mountpoint = secret {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.086824] env[62109]: DEBUG 
oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vault.kv_path = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.086986] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vault.kv_version = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.087162] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vault.namespace = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.087320] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vault.root_token_id = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.087477] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vault.ssl_ca_crt_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.087648] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vault.timeout = 60.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.087808] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vault.use_ssl = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.087976] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.088159] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.088320] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.auth_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.088478] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.088635] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.088793] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.088951] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.connect_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.089120] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.connect_retry_delay = None {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.089279] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.089442] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.089594] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.089747] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.089901] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.090068] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.region_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.090228] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.090382] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.090549] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.service_type = identity {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.090741] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.090905] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.091074] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.091234] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.091416] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
498.091600] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] keystone.version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.091838] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.connection_uri = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.092057] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.cpu_mode = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.092238] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.cpu_model_extra_flags = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.092408] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.cpu_models = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.092580] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.cpu_power_governor_high = performance {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.092750] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.cpu_power_governor_low = powersave {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.092911] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.cpu_power_management = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.093090] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.093258] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.device_detach_attempts = 8 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.093420] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.device_detach_timeout = 20 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.093583] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.disk_cachemodes = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.093741] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.disk_prefix = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.093904] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.enabled_perf_events = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.094074] env[62109]: DEBUG oslo_service.service [None 
req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.file_backed_memory = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.094242] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.gid_maps = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.094399] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.hw_disk_discard = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.094556] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.hw_machine_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.094725] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.images_rbd_ceph_conf = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.094890] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.095069] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.095244] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.images_rbd_glance_store_name = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.095415] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.images_rbd_pool = rbd {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.095587] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.images_type = default {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.095749] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.images_volume_group = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.095911] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.inject_key = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.096084] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.inject_partition = -2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.096248] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.inject_password = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.096414] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] 
libvirt.iscsi_iface = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.096577] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.iser_use_multipath = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.096741] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.live_migration_bandwidth = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.096905] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.097078] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.live_migration_downtime = 500 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.097245] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.097408] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.097576] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.live_migration_inbound_addr = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.097738] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.098160] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.live_migration_permit_post_copy = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.098160] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.live_migration_scheme = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.098254] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.live_migration_timeout_action = abort {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.098401] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.live_migration_tunnelled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.098575] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.live_migration_uri = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.098768] env[62109]: DEBUG oslo_service.service [None 
req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.live_migration_with_native_tls = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.098933] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.max_queues = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.099112] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.099393] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.099574] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.nfs_mount_options = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.099892] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.100086] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.100256] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.num_iser_scan_tries = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.100429] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.num_memory_encrypted_guests = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.100589] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.100753] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.num_pcie_ports = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.100921] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.num_volume_scan_tries = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.101101] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.pmem_namespaces = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.101264] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.quobyte_client_cfg = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.101592] env[62109]: DEBUG oslo_service.service [None 
req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.101773] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.rbd_connect_timeout = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.101942] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.102123] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.102288] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.rbd_secret_uuid = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.102464] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.rbd_user = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.102648] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.102826] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.remote_filesystem_transport = ssh {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.102989] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.rescue_image_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.103162] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.rescue_kernel_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.103322] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.rescue_ramdisk_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.103491] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.103652] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.rx_queue_size = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.103822] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.smbfs_mount_options = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.104107] env[62109]: DEBUG oslo_service.service [None 
req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.104282] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.snapshot_compression = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.104445] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.snapshot_image_format = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.104666] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.104832] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.sparse_logical_volumes = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.104995] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.swtpm_enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.105180] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.swtpm_group = tss {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.105347] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.swtpm_user = tss {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.105518] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.sysinfo_serial = unique {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.105678] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.tb_cache_size = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.105836] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.tx_queue_size = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.106006] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.uid_maps = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.106175] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.use_virtio_for_bridges = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.106344] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.virt_type = kvm {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.106511] env[62109]: DEBUG oslo_service.service [None 
req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.volume_clear = zero {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.106676] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.volume_clear_size = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.106843] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.volume_use_multipath = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.107018] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.vzstorage_cache_path = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.107189] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.107357] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.vzstorage_mount_group = qemu {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.107523] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.vzstorage_mount_opts = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.107691] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.107999] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.108259] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.vzstorage_mount_user = stack {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.108445] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.108628] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.108805] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.auth_type = password {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.108967] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.109145] env[62109]: DEBUG oslo_service.service 
[None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.109311] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.109471] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.connect_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.109632] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.connect_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.109804] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.default_floating_pool = public {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.109964] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.110143] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.extension_sync_interval = 600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.110308] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.http_retries = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.110471] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.110654] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.110837] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.111023] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.111193] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.111364] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.ovs_bridge = br-int {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.111565] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.physnets = [] {{(pid=62109) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.111739] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.region_name = RegionOne {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.111904] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.112092] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.service_metadata_proxy = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.112258] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.112472] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.service_type = network {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.112681] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.112854] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.113027] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.113195] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.113379] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.113543] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] neutron.version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.113719] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] notifications.bdms_in_notifications = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.113897] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] notifications.default_level = INFO {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.114086] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] notifications.notification_format = unversioned {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.114255] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] notifications.notify_on_state_change = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.114433] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.114644] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] pci.alias = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.114821] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] pci.device_spec = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.114987] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] pci.report_in_placement = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.115178] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.115353] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.auth_type = password {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.115525] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.115684] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.115838] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.115999] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.116173] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.connect_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.116334] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.connect_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.116495] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.default_domain_id = None {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.116654] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.default_domain_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.116812] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.domain_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.116968] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.domain_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.117137] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.117300] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.117457] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.117616] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.117772] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.117941] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.password = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.118111] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.project_domain_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.118282] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.project_domain_name = Default {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.118453] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.project_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.118628] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.project_name = service {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.118797] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.region_name = RegionOne {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.118965] 
env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.119134] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.119306] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.service_type = placement {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.119470] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.119631] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.119793] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.119955] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.system_scope = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.120126] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.120287] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.trust_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.120444] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.user_domain_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.120611] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.user_domain_name = Default {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.120769] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.user_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.120940] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.username = nova {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.121140] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.121304] env[62109]: DEBUG oslo_service.service [None 
req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] placement.version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.121514] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] quota.cores = 20 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.121688] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] quota.count_usage_from_placement = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.121860] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.122045] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] quota.injected_file_content_bytes = 10240 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.122218] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] quota.injected_file_path_length = 255 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.122385] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] quota.injected_files = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.122589] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] quota.instances = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.122766] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] quota.key_pairs = 100 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.122933] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] quota.metadata_items = 128 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.123113] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] quota.ram = 51200 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.123278] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] quota.recheck_quota = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.123443] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] quota.server_group_members = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.123607] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] quota.server_groups = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.123782] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.123942] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.124117] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] scheduler.image_metadata_prefilter = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.124277] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.124439] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] scheduler.max_attempts = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.124602] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] scheduler.max_placement_results = 1000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.124761] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.124918] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] scheduler.query_placement_for_image_type_support = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.125089] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.125263] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] scheduler.workers = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.125433] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.125601] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.125778] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.125945] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.126122] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.126285] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.126446] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.126661] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.126835] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.host_subset_size = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.127006] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.127174] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.127339] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.127503] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.isolated_hosts = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.127666] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.isolated_images = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.127829] env[62109]: DEBUG oslo_service.service [None 
req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.127990] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.128171] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.128334] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.pci_in_placement = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.128495] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.128656] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.128819] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.128989] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.129169] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.129336] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.129497] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.track_instance_changes = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.129676] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.129846] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] metrics.required = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.130021] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] metrics.weight_multiplier = 1.0 
{{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.130191] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.130367] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] metrics.weight_setting = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.130697] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.130874] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] serial_console.enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.131061] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] serial_console.port_range = 10000:20000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.131240] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.131414] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.131619] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] serial_console.serialproxy_port = 6083 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.131798] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] service_user.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.131977] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] service_user.auth_type = password {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.132157] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] service_user.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.132319] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] service_user.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.132519] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] service_user.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.132689] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] service_user.insecure = False {{(pid=62109) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.132851] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] service_user.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.133036] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] service_user.send_service_user_token = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.133572] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] service_user.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.133572] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] service_user.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.133572] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] spice.agent_enabled = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.133721] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] spice.enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.134026] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.134236] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.134414] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] spice.html5proxy_port = 6082 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.134575] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] spice.image_compression = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.134737] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] spice.jpeg_compression = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.134897] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] spice.playback_compression = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.135073] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] spice.require_secure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.135245] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] spice.server_listen = 127.0.0.1 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.135414] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.135572] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] spice.streaming_mode = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.135731] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] spice.zlib_compression = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.135898] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] upgrade_levels.baseapi = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.136079] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] upgrade_levels.compute = auto {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.136244] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] upgrade_levels.conductor = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.136404] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] upgrade_levels.scheduler = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.136573] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vendordata_dynamic_auth.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.136737] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vendordata_dynamic_auth.auth_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.136895] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vendordata_dynamic_auth.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.137067] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vendordata_dynamic_auth.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.137233] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.137394] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vendordata_dynamic_auth.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.137554] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vendordata_dynamic_auth.keyfile = None {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.137717] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.137875] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vendordata_dynamic_auth.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.138062] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.api_retry_count = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.138232] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.ca_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.138394] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.cache_prefix = devstack-image-cache {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.138559] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.cluster_name = testcl1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.138723] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.connection_pool_size = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.138879] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.console_delay_seconds = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.139057] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.datastore_regex = ^datastore.* {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.139276] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.139450] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.host_password = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.139620] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.host_port = 443 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.139788] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.host_username = administrator@vsphere.local {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.139955] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.insecure = True {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.140128] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.integration_bridge = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.140293] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.maximum_objects = 100 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.140457] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.pbm_default_policy = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.140620] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.pbm_enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.140779] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.pbm_wsdl_location = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.140947] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.141119] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.serial_port_proxy_uri = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.141279] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.serial_port_service_uri = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.141471] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.task_poll_interval = 0.5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.141653] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.use_linked_clone = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.141825] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.vnc_keymap = en-us {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.141991] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.vnc_port = 5900 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.142172] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vmware.vnc_port_total = 10000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.142360] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vnc.auth_schemes = ['none'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.142568] 
env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vnc.enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.142882] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.143086] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.143269] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vnc.novncproxy_port = 6080 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.143451] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vnc.server_listen = 127.0.0.1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.143628] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.143812] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vnc.vencrypt_ca_certs = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.143988] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vnc.vencrypt_client_cert = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.144166] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vnc.vencrypt_client_key = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.144351] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.144517] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.disable_deep_image_inspection = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.144682] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.144846] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.145014] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.145183] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.disable_rootwrap = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.145346] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.enable_numa_live_migration = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.145508] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.145670] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.145831] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.145992] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.libvirt_disable_apic = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.146168] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.146331] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.146493] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.146654] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.146813] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.146973] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.147148] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.147311] 
env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.147525] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.147709] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.147912] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.148107] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] wsgi.client_socket_timeout = 900 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.148279] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] wsgi.default_pool_size = 1000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.148447] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] wsgi.keep_alive = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.148617] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] wsgi.max_header_line = 16384 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.148778] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] wsgi.secure_proxy_ssl_header = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.148938] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] wsgi.ssl_ca_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.149111] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] wsgi.ssl_cert_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.149275] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] wsgi.ssl_key_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.149442] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] wsgi.tcp_keepidle = 600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.149617] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.149783] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] zvm.ca_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.149945] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] zvm.cloud_connector_url = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.150270] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.150450] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] zvm.reachable_timeout = 300 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.150632] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_policy.enforce_new_defaults = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.151048] env[62109]: WARNING oslo_config.cfg [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
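(Annotation, not part of the captured log.) Every "group.option = value" DEBUG line in this dump is emitted by oslo.config's ConfigOpts.log_opt_values(), the function named in each entry's trailer (oslo_config/cfg.py), which nova-compute calls once at startup to record its effective configuration. The WARNING just above is oslo.config's standard notice for an option declared deprecated_for_removal that has been explicitly set (here oslo_policy.enforce_scope). The sketch below is a minimal illustration under those assumptions; the group and option names are hypothetical and are not Nova's own options or startup code.

```python
# Illustrative sketch only: shows how a registered oslo.config option group
# ends up in a "log_opt_values" dump like the one above, and how an option
# marked deprecated_for_removal is declared. Names are hypothetical.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF = cfg.CONF

demo_group = cfg.OptGroup(name='demo_policy', title='Illustrative option group')
demo_opts = [
    cfg.BoolOpt('enforce_scope_example',
                default=True,
                # Declaring an option this way is what makes oslo.config emit a
                # "Deprecated: Option ... is deprecated for removal" WARNING
                # when the option is explicitly set in the config file.
                deprecated_for_removal=True,
                deprecated_reason='Example of a deprecated-for-removal option.'),
    cfg.IntOpt('workers_example', default=2),
]

CONF.register_group(demo_group)
CONF.register_opts(demo_opts, group=demo_group)

if __name__ == '__main__':
    CONF([])  # parse an (empty) command line so the options are usable
    # Writes one DEBUG line per registered option in the
    # "demo_policy.enforce_scope_example = True" style seen throughout this log.
    CONF.log_opt_values(LOG, logging.DEBUG)
```

Running the sketch prints a small dump in the same "group.option = value" format; in the real log the equivalent call covers every group Nova registers (scheduler, filter_scheduler, vmware, vnc, oslo_messaging_rabbit, and so on), with secrets such as vmware.host_password masked as ****.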
[ 498.151243] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_policy.enforce_scope = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.151445] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_policy.policy_default_rule = default {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.151640] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.151822] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_policy.policy_file = policy.yaml {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.152007] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.152179] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.152343] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.152524] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.152700] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.152872] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.153061] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.153246] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler.connection_string = messaging:// {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.153416] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler.enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.153589] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler.es_doc_type = notification 
{{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.153758] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler.es_scroll_size = 10000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.153928] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler.es_scroll_time = 2m {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.154106] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler.filter_error_trace = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.154279] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler.hmac_keys = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.154450] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler.sentinel_service_name = mymaster {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.154621] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler.socket_timeout = 0.1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.154786] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler.trace_requests = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.154949] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler.trace_sqlalchemy = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.155146] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler_jaeger.process_tags = {} {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.155311] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler_jaeger.service_name_prefix = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.155475] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] profiler_otlp.service_name_prefix = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.155663] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] remote_debug.host = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.155807] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] remote_debug.port = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.155985] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.156164] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.156331] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.156557] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.156676] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.156816] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.156975] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.157151] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.157315] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.157485] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.157643] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.157812] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.157979] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.158164] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.158335] 
env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.158502] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.158665] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.158840] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.159007] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.159176] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.159343] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.159510] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.159672] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.159840] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.160010] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.160180] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.160345] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.160505] env[62109]: DEBUG oslo_service.service [None 
req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.160674] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.160841] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.ssl = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.161018] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.161195] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.161358] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.161561] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.161739] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.ssl_version = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.161904] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.162110] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.162282] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_notifications.retry = -1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.162497] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.162690] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_messaging_notifications.transport_url = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.162890] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.auth_section = None {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.163082] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.auth_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.163252] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.163414] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.163581] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.163743] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.connect_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.163905] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.connect_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.164077] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.endpoint_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.164241] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.164404] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.164562] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.164730] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.164885] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.165055] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.region_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.165223] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.165386] 
env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.165546] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.service_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.165709] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.165883] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.166077] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.166244] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.166406] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.valid_interfaces = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.166567] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_limit.version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.166734] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_reports.file_event_handler = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.166901] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.167076] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] oslo_reports.log_dir = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.167252] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.167412] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.167574] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.167741] 
env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.167903] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.168075] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.168249] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.168409] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plug_ovs_privileged.group = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.168567] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.168736] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.168910] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.169097] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] vif_plug_ovs_privileged.user = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.169274] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_linux_bridge.flat_interface = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.169465] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.169634] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.169804] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.169974] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.170158] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.170328] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.170490] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.170668] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.170850] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_ovs.isolate_vif = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.171023] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.171194] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.171365] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.171563] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_ovs.ovsdb_interface = native {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.171733] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] os_vif_ovs.per_port_bridge = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.171921] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] privsep_osbrick.capabilities = [21] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.172113] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] privsep_osbrick.group = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.172278] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] privsep_osbrick.helper_command = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.172448] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.172618] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.172776] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] privsep_osbrick.user = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.172951] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.173127] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] nova_sys_admin.group = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.173287] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] nova_sys_admin.helper_command = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.173455] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.173618] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.173775] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] nova_sys_admin.user = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 498.173908] env[62109]: DEBUG oslo_service.service [None req-baae3f3d-493a-4760-9b66-5c6508e09b3c None None] ******************************************************************************** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 498.174417] env[62109]: INFO nova.service [-] Starting compute node (version 30.1.0) [ 498.677469] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Getting list of instances from cluster (obj){ [ 498.677469] env[62109]: value = "domain-c8" [ 498.677469] env[62109]: _type = "ClusterComputeResource" [ 498.677469] env[62109]: } {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 498.678607] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960b098b-3ea1-4b7b-9f8a-6dd437e8e684 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.688233] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Got total of 0 instances {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 498.688763] env[62109]: WARNING nova.virt.vmwareapi.driver [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 498.689245] env[62109]: INFO nova.virt.node [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Generated node identity 574e9717-c25e-453d-8028-45d9e2f95398 [ 498.689485] env[62109]: INFO nova.virt.node [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Wrote node identity 574e9717-c25e-453d-8028-45d9e2f95398 to /opt/stack/data/n-cpu-1/compute_id [ 499.192708] env[62109]: WARNING nova.compute.manager [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Compute nodes ['574e9717-c25e-453d-8028-45d9e2f95398'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 500.198984] env[62109]: INFO nova.compute.manager [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 501.204892] env[62109]: WARNING nova.compute.manager [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 501.205268] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 501.205402] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 501.205552] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 501.205707] env[62109]: DEBUG nova.compute.resource_tracker [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 501.206631] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f772f6-53d9-42dc-a2a0-0473e7986da1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 501.214517] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1eadbf-e657-4f18-954a-8a32d7c990f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 501.227662] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9567300e-b40b-49ee-abc4-222130ab6e03 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 501.233819] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6dc205-48ae-4508-bafc-3ba59197aa65 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 501.262289] env[62109]: DEBUG nova.compute.resource_tracker [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181569MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 501.262440] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 501.262619] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 501.764786] env[62109]: WARNING nova.compute.resource_tracker [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] No compute node record for cpu-1:574e9717-c25e-453d-8028-45d9e2f95398: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 574e9717-c25e-453d-8028-45d9e2f95398 could not be found. [ 502.269161] env[62109]: INFO nova.compute.resource_tracker [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 574e9717-c25e-453d-8028-45d9e2f95398 [ 503.780813] env[62109]: DEBUG nova.compute.resource_tracker [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 503.780813] env[62109]: DEBUG nova.compute.resource_tracker [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 503.929568] env[62109]: INFO nova.scheduler.client.report [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] [req-9347173a-e53c-480c-91bd-0d63e5b9ab72] Created resource provider record via placement API for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
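The records around this point show the resource tracker creating a Placement resource provider for this node and then setting its VCPU/MEMORY_MB/DISK_GB inventory. As a minimal standalone sketch of that same inventory update done directly against the Placement REST API: the endpoint URL and token below are placeholder assumptions, not values from this deployment; only the provider UUID and the inventory numbers are taken from the log.

# Hypothetical sketch: replace a resource provider's inventory via the Placement
# REST API, mirroring the inventory dictionaries logged by the resource tracker.
import requests

PLACEMENT_URL = "http://placement.example:8778"  # assumption, not from this log
TOKEN = "REPLACE_ME"                             # assumption, not from this log
PROVIDER_UUID = "574e9717-c25e-453d-8028-45d9e2f95398"

inventories = {
    "VCPU":      {"total": 48,     "reserved": 0,   "min_unit": 1, "max_unit": 16,
                  "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1, "max_unit": 65530,
                  "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "min_unit": 1, "max_unit": 170,
                  "step_size": 1, "allocation_ratio": 1.0},
}

# The request body carries the provider generation it expects; for a freshly
# created provider that is 0, as in the log above.
resp = requests.put(
    f"{PLACEMENT_URL}/resource_providers/{PROVIDER_UUID}/inventories",
    headers={"X-Auth-Token": TOKEN},
    json={"resource_provider_generation": 0, "inventories": inventories},
)
resp.raise_for_status()
print(resp.json())  # on success the provider generation is bumped (0 -> 1, matching the log)

In the log the same effect is achieved through nova.scheduler.client.report's set_inventory_for_provider; the sketch only illustrates the payload shape that ends up at Placement.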
[ 503.947047] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5602ab4c-33ed-44df-b616-b2eaf2114aac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.955104] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d316d9b6-e981-4df9-8ffd-97cf5df9c464 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.986744] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b35e3a-bc62-4ea7-9011-6f60f5da2dbc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.993715] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0c1c56-06af-41d8-9898-6e6302f2eb83 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.006525] env[62109]: DEBUG nova.compute.provider_tree [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 504.546297] env[62109]: DEBUG nova.scheduler.client.report [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 504.546536] env[62109]: DEBUG nova.compute.provider_tree [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 0 to 1 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 504.546734] env[62109]: DEBUG nova.compute.provider_tree [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 504.595621] env[62109]: DEBUG nova.compute.provider_tree [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Updating 
resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 1 to 2 during operation: update_traits {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 505.100096] env[62109]: DEBUG nova.compute.resource_tracker [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 505.100449] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.838s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 505.100506] env[62109]: DEBUG nova.service [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Creating RPC server for service compute {{(pid=62109) start /opt/stack/nova/nova/service.py:186}} [ 505.113293] env[62109]: DEBUG nova.service [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] Join ServiceGroup membership for this service compute {{(pid=62109) start /opt/stack/nova/nova/service.py:203}} [ 505.113481] env[62109]: DEBUG nova.servicegroup.drivers.db [None req-d722ba74-499c-425d-86df-899c1d9fdbf5 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62109) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 541.567966] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquiring lock "53bae73f-2aec-41c4-bd62-aeedbf162258" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.568313] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Lock "53bae73f-2aec-41c4-bd62-aeedbf162258" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.868281] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "11f4e78d-12c7-4f93-8104-134d337ee6e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.868690] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "11f4e78d-12c7-4f93-8104-134d337ee6e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.071840] env[62109]: DEBUG nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 
tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 542.373569] env[62109]: DEBUG nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 542.611671] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.611938] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.613923] env[62109]: INFO nova.compute.claims [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 542.806208] env[62109]: DEBUG oslo_concurrency.lockutils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Acquiring lock "13988400-7b35-4175-9410-84eff918111d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.807315] env[62109]: DEBUG oslo_concurrency.lockutils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Lock "13988400-7b35-4175-9410-84eff918111d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.908213] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.310663] env[62109]: DEBUG nova.compute.manager [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 543.664121] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Acquiring lock "892142c4-8d74-4c27-95ed-2edc07def573" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.664408] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Lock "892142c4-8d74-4c27-95ed-2edc07def573" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.708417] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2409daa5-7b34-4695-b121-7e80de298b22 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.716376] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f6e7cb-e57b-4099-8e17-9e81fb5ad628 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.749273] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfffcd59-d79d-48ac-8a70-bdd5748266f8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.757911] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4644ccc-a733-4ca1-bbd1-0e03467a91c4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.771994] env[62109]: DEBUG nova.compute.provider_tree [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.837671] env[62109]: DEBUG oslo_concurrency.lockutils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.142459] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Acquiring lock "a10c8e2e-9b5c-498e-81dc-ca69af0ff123" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.142689] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 
tempest-ServerExternalEventsTest-598638162-project-member] Lock "a10c8e2e-9b5c-498e-81dc-ca69af0ff123" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.170542] env[62109]: DEBUG nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 544.274818] env[62109]: DEBUG nova.scheduler.client.report [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 544.645889] env[62109]: DEBUG nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 544.703930] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.780298] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.168s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.780989] env[62109]: DEBUG nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 544.784185] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.876s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.785065] env[62109]: INFO nova.compute.claims [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 545.179637] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.290654] env[62109]: DEBUG nova.compute.utils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 545.296897] env[62109]: DEBUG nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 545.297258] env[62109]: DEBUG nova.network.neutron [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 545.587665] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Acquiring lock "46bb583c-bc67-4b18-903d-afbbf8248691" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.587903] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Lock "46bb583c-bc67-4b18-903d-afbbf8248691" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.803877] env[62109]: DEBUG nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 545.925557] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c53391-d655-4127-a9c3-beadcbc49056 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.934092] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9b7f1f-9acb-4026-8da5-accf463d578b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.967883] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed8169a-dc62-49b0-b305-518fbd62fc7c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.979482] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ccf561-0bca-41c3-bac9-897c0ee27315 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.003519] env[62109]: DEBUG nova.compute.provider_tree [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 546.092260] env[62109]: DEBUG nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 546.196126] env[62109]: DEBUG nova.policy [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64678c7edb1e449da933c3438fb88353', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de9406794e594260a4373c674ec12cf9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 546.512032] env[62109]: DEBUG nova.scheduler.client.report [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 546.625171] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.819277] env[62109]: DEBUG nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 546.852254] env[62109]: DEBUG nova.virt.hardware [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.853312] env[62109]: DEBUG nova.virt.hardware [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.853677] env[62109]: DEBUG nova.virt.hardware [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.854250] env[62109]: DEBUG nova.virt.hardware [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.856956] env[62109]: DEBUG nova.virt.hardware [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.856956] env[62109]: DEBUG nova.virt.hardware [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.856956] env[62109]: DEBUG nova.virt.hardware [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.856956] env[62109]: DEBUG nova.virt.hardware [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 546.856956] env[62109]: DEBUG nova.virt.hardware [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.857315] env[62109]: DEBUG nova.virt.hardware [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.857315] env[62109]: DEBUG nova.virt.hardware [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.857315] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c57949-b633-45d1-8c3d-e2c87e8c1cc3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.875085] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7253f31c-cc9d-4be3-806f-fd52a8059b2f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.891775] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7025a7a-1a57-4b90-aea7-7d8664b8f135 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.015578] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.232s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.016377] env[62109]: DEBUG nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 547.021630] env[62109]: DEBUG oslo_concurrency.lockutils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.184s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.023171] env[62109]: INFO nova.compute.claims [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 547.115887] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 547.141402] env[62109]: DEBUG nova.network.neutron [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Successfully created port: 789aa4a4-faf6-4751-a092-d070d022284b {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 547.523033] env[62109]: DEBUG nova.compute.utils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 547.528134] env[62109]: DEBUG nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 547.528134] env[62109]: DEBUG nova.network.neutron [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 547.600131] env[62109]: DEBUG nova.policy [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d8b4a13b12d477ebd973d90ec11f62d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f94fd7a82dc0489597534c518365971b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 547.626019] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Getting list of instances from cluster (obj){ [ 547.626019] env[62109]: value = "domain-c8" [ 547.626019] env[62109]: _type = "ClusterComputeResource" [ 547.626019] env[62109]: } {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 547.626019] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6d1bce-78b6-4b4f-a03c-abe33397fc52 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.639946] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Got total of 0 instances {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 547.639946] env[62109]: WARNING nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] While synchronizing instance power states, found 3 instances in the database and 0 instances on the hypervisor. 
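The _sync_power_states warning just above is the periodic task noticing that the database already holds three claimed instances while the vmwareapi driver still reports zero VMs on cluster domain-c8, because nothing has been spawned yet. A much simplified, self-contained sketch of that kind of reconciliation follows; the DbInstance type and sync_power_states helper are hypothetical stand-ins for illustration, not Nova's actual ComputeManager or driver interfaces.

# Simplified illustration of a power-state sync pass; helpers are hypothetical.
from dataclasses import dataclass


@dataclass
class DbInstance:
    uuid: str
    power_state: str  # state recorded in the database


def sync_power_states(db_instances: list[DbInstance],
                      hypervisor_uuids: set[str]) -> list[str]:
    """Return the uuids whose DB record has no matching VM on the hypervisor."""
    if len(db_instances) != len(hypervisor_uuids):
        print(f"WARNING: found {len(db_instances)} instances in the database "
              f"and {len(hypervisor_uuids)} instances on the hypervisor.")
    missing = []
    for inst in db_instances:
        if inst.uuid not in hypervisor_uuids:
            # Nova would trigger a per-instance sync here, serialized under a
            # per-uuid lock (the "Acquiring lock <uuid>" records in the log);
            # this sketch just collects the mismatches.
            missing.append(inst.uuid)
    return missing


# Mirroring the log: three instances exist in the DB, none on the cluster yet.
db = [DbInstance("53bae73f-2aec-41c4-bd62-aeedbf162258", "building"),
      DbInstance("11f4e78d-12c7-4f93-8104-134d337ee6e0", "building"),
      DbInstance("13988400-7b35-4175-9410-84eff918111d", "building")]
print(sync_power_states(db, set()))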
[ 547.639946] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Triggering sync for uuid 53bae73f-2aec-41c4-bd62-aeedbf162258 {{(pid=62109) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 547.639946] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Triggering sync for uuid 11f4e78d-12c7-4f93-8104-134d337ee6e0 {{(pid=62109) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 547.640918] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Triggering sync for uuid 13988400-7b35-4175-9410-84eff918111d {{(pid=62109) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 547.640918] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "53bae73f-2aec-41c4-bd62-aeedbf162258" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.641093] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "11f4e78d-12c7-4f93-8104-134d337ee6e0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.641525] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "13988400-7b35-4175-9410-84eff918111d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.641847] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 547.642446] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Getting list of instances from cluster (obj){ [ 547.642446] env[62109]: value = "domain-c8" [ 547.642446] env[62109]: _type = "ClusterComputeResource" [ 547.642446] env[62109]: } {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 547.645322] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4d39d9-87dc-4144-99dd-c9caa5bb3a20 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.656515] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Got total of 0 instances {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 548.026503] env[62109]: DEBUG nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 548.164350] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0210b588-efae-49a8-99bc-bcc9a7bba6aa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.177047] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a88b66-6bde-4d5b-ad46-d94c3fe11d80 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.236136] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c2d158-1fd9-47b7-8f0b-6cbfa166dfc7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.243363] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56dd2a16-f23f-47d5-bd84-2d580c41cb6e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.256615] env[62109]: DEBUG nova.compute.provider_tree [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 548.261328] env[62109]: DEBUG nova.network.neutron [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Successfully created port: 805389e0-22e2-4986-a49e-ca570e7c3a80 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 548.762281] env[62109]: DEBUG nova.scheduler.client.report [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 549.041069] env[62109]: DEBUG nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 549.081190] env[62109]: DEBUG nova.virt.hardware [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 549.081504] env[62109]: DEBUG nova.virt.hardware [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 549.081820] env[62109]: DEBUG nova.virt.hardware [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 549.081924] env[62109]: DEBUG nova.virt.hardware [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 549.090190] env[62109]: DEBUG nova.virt.hardware [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 549.090190] env[62109]: DEBUG nova.virt.hardware [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 549.090190] env[62109]: DEBUG nova.virt.hardware [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 549.090190] env[62109]: DEBUG nova.virt.hardware [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 549.090190] env[62109]: DEBUG nova.virt.hardware [None req-1d5205e6-b351-49fc-ba13-58777b85660d 
tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 549.090393] env[62109]: DEBUG nova.virt.hardware [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 549.090393] env[62109]: DEBUG nova.virt.hardware [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 549.093918] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564d1f8d-b273-4369-9302-6a178364ef57 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.106375] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea18832-b95c-41df-a8d6-185c55f3c8c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.270407] env[62109]: DEBUG oslo_concurrency.lockutils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.247s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.271163] env[62109]: DEBUG nova.compute.manager [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 549.276464] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.573s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.280355] env[62109]: INFO nova.compute.claims [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 549.787829] env[62109]: DEBUG nova.compute.utils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 549.792767] env[62109]: DEBUG nova.compute.manager [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Not allocating networking since 'none' was specified. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 550.293400] env[62109]: DEBUG nova.compute.manager [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 550.440843] env[62109]: ERROR nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 789aa4a4-faf6-4751-a092-d070d022284b, please check neutron logs for more information. 
[ 550.440843] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 550.440843] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.440843] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 550.440843] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 550.440843] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 550.440843] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 550.440843] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 550.440843] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.440843] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 550.440843] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.440843] env[62109]: ERROR nova.compute.manager raise self.value [ 550.440843] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 550.440843] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 550.440843] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.440843] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 550.441352] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.441352] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 550.441352] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 789aa4a4-faf6-4751-a092-d070d022284b, please check neutron logs for more information. 
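The traceback above shows _update_ports_for_instance wrapping the failing port update in oslo_utils.excutils.save_and_reraise_exception, so cleanup can run before the original PortBindingFailed propagates back to _allocate_network_async. A minimal, self-contained sketch of that pattern, assuming only the oslo.utils library (the helper functions and exception class here are illustrative stand-ins, not nova code):

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

    def update_port(port_id):
        # Stand-in for the neutron port update that failed in the log above.
        raise PortBindingFailed("Binding failed for port %s" % port_id)

    def rollback_ports(created):
        print("rolling back %d previously created port(s)" % len(created))

    def update_ports_for_instance(port_ids):
        created = []
        for port_id in port_ids:
            try:
                created.append(update_port(port_id))
            except Exception:
                # Cleanup runs inside the context manager; on exit the
                # original exception is re-raised unchanged, which is why
                # PortBindingFailed still reaches the caller intact.
                with excutils.save_and_reraise_exception():
                    rollback_ports(created)
        return created

    try:
        update_ports_for_instance(['789aa4a4-faf6-4751-a092-d070d022284b'])
    except PortBindingFailed as exc:
        print(exc)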
[ 550.441352] env[62109]: ERROR nova.compute.manager [ 550.441352] env[62109]: Traceback (most recent call last): [ 550.441352] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 550.441352] env[62109]: listener.cb(fileno) [ 550.441352] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.441352] env[62109]: result = function(*args, **kwargs) [ 550.441352] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 550.441352] env[62109]: return func(*args, **kwargs) [ 550.441352] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 550.441352] env[62109]: raise e [ 550.441352] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.441352] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 550.441352] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 550.441352] env[62109]: created_port_ids = self._update_ports_for_instance( [ 550.441352] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 550.441352] env[62109]: with excutils.save_and_reraise_exception(): [ 550.441352] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.441352] env[62109]: self.force_reraise() [ 550.441352] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.441352] env[62109]: raise self.value [ 550.441352] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 550.441352] env[62109]: updated_port = self._update_port( [ 550.441352] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.441352] env[62109]: _ensure_no_port_binding_failure(port) [ 550.441352] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.441352] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 550.445376] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 789aa4a4-faf6-4751-a092-d070d022284b, please check neutron logs for more information. [ 550.445376] env[62109]: Removing descriptor: 15 [ 550.447785] env[62109]: ERROR nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 789aa4a4-faf6-4751-a092-d070d022284b, please check neutron logs for more information. 
[ 550.447785] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Traceback (most recent call last): [ 550.447785] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 550.447785] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] yield resources [ 550.447785] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 550.447785] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] self.driver.spawn(context, instance, image_meta, [ 550.447785] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 550.447785] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] self._vmops.spawn(context, instance, image_meta, injected_files, [ 550.447785] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 550.447785] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] vm_ref = self.build_virtual_machine(instance, [ 550.447785] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 550.448150] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] vif_infos = vmwarevif.get_vif_info(self._session, [ 550.448150] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 550.448150] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] for vif in network_info: [ 550.448150] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 550.448150] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] return self._sync_wrapper(fn, *args, **kwargs) [ 550.448150] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 550.448150] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] self.wait() [ 550.448150] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 550.448150] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] self[:] = self._gt.wait() [ 550.448150] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 550.448150] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] return self._exit_event.wait() [ 550.448150] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 550.448150] env[62109]: ERROR 
nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] result = hub.switch() [ 550.448483] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 550.448483] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] return self.greenlet.switch() [ 550.448483] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.448483] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] result = function(*args, **kwargs) [ 550.448483] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 550.448483] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] return func(*args, **kwargs) [ 550.448483] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 550.448483] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] raise e [ 550.448483] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.448483] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] nwinfo = self.network_api.allocate_for_instance( [ 550.448483] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 550.448483] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] created_port_ids = self._update_ports_for_instance( [ 550.448483] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 550.448803] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] with excutils.save_and_reraise_exception(): [ 550.448803] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.448803] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] self.force_reraise() [ 550.448803] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.448803] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] raise self.value [ 550.448803] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 550.448803] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] updated_port = self._update_port( [ 550.448803] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.448803] 
env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] _ensure_no_port_binding_failure(port) [ 550.448803] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.448803] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] raise exception.PortBindingFailed(port_id=port['id']) [ 550.448803] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] nova.exception.PortBindingFailed: Binding failed for port 789aa4a4-faf6-4751-a092-d070d022284b, please check neutron logs for more information. [ 550.448803] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] [ 550.449333] env[62109]: INFO nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Terminating instance [ 550.451250] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquiring lock "refresh_cache-53bae73f-2aec-41c4-bd62-aeedbf162258" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.451421] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquired lock "refresh_cache-53bae73f-2aec-41c4-bd62-aeedbf162258" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.451583] env[62109]: DEBUG nova.network.neutron [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 550.470354] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df0f46b-9478-4129-9df6-f340c57a65be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.481795] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7beb3f5d-3d38-40d2-b204-2d96b4249a60 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.490479] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Acquiring lock "b52ff4f8-2341-493e-8587-b8d1d12efb7e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.490702] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Lock "b52ff4f8-2341-493e-8587-b8d1d12efb7e" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.521954] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7c6f7a-1d5f-4fa4-87f7-04e46371a208 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.530867] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8210d7e9-7d2c-4933-8b99-e255eabd479e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.545742] env[62109]: DEBUG nova.compute.provider_tree [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 550.995061] env[62109]: DEBUG nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 551.009328] env[62109]: DEBUG nova.network.neutron [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.052878] env[62109]: DEBUG nova.scheduler.client.report [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 551.083926] env[62109]: DEBUG nova.compute.manager [req-26e488e0-7171-41e6-8c17-df147707bb73 req-2e432d0d-220f-4f08-83a3-cd615c0fc202 service nova] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Received event network-changed-789aa4a4-faf6-4751-a092-d070d022284b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 551.083926] env[62109]: DEBUG nova.compute.manager [req-26e488e0-7171-41e6-8c17-df147707bb73 req-2e432d0d-220f-4f08-83a3-cd615c0fc202 service nova] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Refreshing instance network info cache due to event network-changed-789aa4a4-faf6-4751-a092-d070d022284b. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 551.083926] env[62109]: DEBUG oslo_concurrency.lockutils [req-26e488e0-7171-41e6-8c17-df147707bb73 req-2e432d0d-220f-4f08-83a3-cd615c0fc202 service nova] Acquiring lock "refresh_cache-53bae73f-2aec-41c4-bd62-aeedbf162258" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.268345] env[62109]: DEBUG nova.network.neutron [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.307820] env[62109]: DEBUG nova.compute.manager [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 551.342151] env[62109]: DEBUG nova.virt.hardware [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 551.342306] env[62109]: DEBUG nova.virt.hardware [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 551.342412] env[62109]: DEBUG nova.virt.hardware [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 551.342581] env[62109]: DEBUG nova.virt.hardware [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 551.342721] env[62109]: DEBUG nova.virt.hardware [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 551.342954] 
env[62109]: DEBUG nova.virt.hardware [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 551.344896] env[62109]: DEBUG nova.virt.hardware [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 551.345143] env[62109]: DEBUG nova.virt.hardware [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 551.345339] env[62109]: DEBUG nova.virt.hardware [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 551.346088] env[62109]: DEBUG nova.virt.hardware [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 551.346420] env[62109]: DEBUG nova.virt.hardware [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 551.347659] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e40d8e-11c8-4a3d-8283-46da50c52d48 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.357971] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20696957-6423-47dd-abad-8a8d3482a6b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.371735] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 551.383467] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 551.383895] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08aa957c-0c2a-4718-a7de-d924664683ca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.397322] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Created folder: OpenStack in parent group-v4. [ 551.397322] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Creating folder: Project (3003e722712b4b979d61122166366ab6). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 551.397322] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f71ac8e4-0de0-437c-8200-2c2d632be92c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.406836] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Created folder: Project (3003e722712b4b979d61122166366ab6) in parent group-v244329. [ 551.406955] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Creating folder: Instances. Parent ref: group-v244330. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 551.407525] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b7c19d0-87be-4320-b9cd-98667692a789 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.416121] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Created folder: Instances in parent group-v244330. [ 551.416121] env[62109]: DEBUG oslo.service.loopingcall [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 551.416250] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13988400-7b35-4175-9410-84eff918111d] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 551.416737] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59131655-e867-4dcd-9970-8d5749abd112 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.434226] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 551.434226] env[62109]: value = "task-1116087" [ 551.434226] env[62109]: _type = "Task" [ 551.434226] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.442298] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116087, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.534846] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.560149] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.280s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.560149] env[62109]: DEBUG nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 551.560149] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.381s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.561493] env[62109]: INFO nova.compute.claims [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 551.592743] env[62109]: ERROR nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 805389e0-22e2-4986-a49e-ca570e7c3a80, please check neutron logs for more information. 
[ 551.592743] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 551.592743] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 551.592743] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 551.592743] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 551.592743] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 551.592743] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 551.592743] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 551.592743] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 551.592743] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 551.592743] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 551.592743] env[62109]: ERROR nova.compute.manager raise self.value [ 551.592743] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 551.592743] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 551.592743] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 551.592743] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 551.593215] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 551.593215] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 551.593215] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 805389e0-22e2-4986-a49e-ca570e7c3a80, please check neutron logs for more information. 
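Both failed spawns end in the same check: _ensure_no_port_binding_failure inspects the port returned by neutron and raises PortBindingFailed when the binding did not succeed. A simplified sketch of that check, assuming neutron's usual convention of marking a failed binding with binding:vif_type = 'binding_failed' (everything except the port id taken from the log is illustrative):

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Assumption: a failed binding is signalled by the portbindings
        # extension setting binding:vif_type to 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # The port from the second failure above would trip the check:
    port = {'id': '805389e0-22e2-4986-a49e-ca570e7c3a80',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)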
[ 551.593215] env[62109]: ERROR nova.compute.manager [ 551.593215] env[62109]: Traceback (most recent call last): [ 551.593215] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 551.593215] env[62109]: listener.cb(fileno) [ 551.593215] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 551.593215] env[62109]: result = function(*args, **kwargs) [ 551.593215] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 551.593215] env[62109]: return func(*args, **kwargs) [ 551.593215] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 551.593215] env[62109]: raise e [ 551.593215] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 551.593215] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 551.593215] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 551.593215] env[62109]: created_port_ids = self._update_ports_for_instance( [ 551.593215] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 551.593215] env[62109]: with excutils.save_and_reraise_exception(): [ 551.593215] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 551.593215] env[62109]: self.force_reraise() [ 551.593215] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 551.593215] env[62109]: raise self.value [ 551.593215] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 551.593215] env[62109]: updated_port = self._update_port( [ 551.593215] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 551.593215] env[62109]: _ensure_no_port_binding_failure(port) [ 551.593215] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 551.593215] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 551.594065] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 805389e0-22e2-4986-a49e-ca570e7c3a80, please check neutron logs for more information. [ 551.594065] env[62109]: Removing descriptor: 16 [ 551.594065] env[62109]: ERROR nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 805389e0-22e2-4986-a49e-ca570e7c3a80, please check neutron logs for more information. 
[ 551.594065] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Traceback (most recent call last): [ 551.594065] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 551.594065] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] yield resources [ 551.594065] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 551.594065] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] self.driver.spawn(context, instance, image_meta, [ 551.594065] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 551.594065] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 551.594065] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 551.594065] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] vm_ref = self.build_virtual_machine(instance, [ 551.594371] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 551.594371] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] vif_infos = vmwarevif.get_vif_info(self._session, [ 551.594371] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 551.594371] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] for vif in network_info: [ 551.594371] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 551.594371] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] return self._sync_wrapper(fn, *args, **kwargs) [ 551.594371] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 551.594371] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] self.wait() [ 551.594371] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 551.594371] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] self[:] = self._gt.wait() [ 551.594371] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 551.594371] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] return self._exit_event.wait() [ 551.594371] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 551.594709] env[62109]: ERROR 
nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] result = hub.switch() [ 551.594709] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 551.594709] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] return self.greenlet.switch() [ 551.594709] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 551.594709] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] result = function(*args, **kwargs) [ 551.594709] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 551.594709] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] return func(*args, **kwargs) [ 551.594709] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 551.594709] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] raise e [ 551.594709] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 551.594709] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] nwinfo = self.network_api.allocate_for_instance( [ 551.594709] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 551.594709] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] created_port_ids = self._update_ports_for_instance( [ 551.595042] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 551.595042] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] with excutils.save_and_reraise_exception(): [ 551.595042] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 551.595042] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] self.force_reraise() [ 551.595042] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 551.595042] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] raise self.value [ 551.595042] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 551.595042] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] updated_port = self._update_port( [ 551.595042] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 551.595042] 
env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] _ensure_no_port_binding_failure(port) [ 551.595042] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 551.595042] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] raise exception.PortBindingFailed(port_id=port['id']) [ 551.595362] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] nova.exception.PortBindingFailed: Binding failed for port 805389e0-22e2-4986-a49e-ca570e7c3a80, please check neutron logs for more information. [ 551.595362] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] [ 551.595362] env[62109]: INFO nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Terminating instance [ 551.605872] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "refresh_cache-11f4e78d-12c7-4f93-8104-134d337ee6e0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.606056] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "refresh_cache-11f4e78d-12c7-4f93-8104-134d337ee6e0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.606236] env[62109]: DEBUG nova.network.neutron [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 551.607583] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Acquiring lock "6a9e1c1d-8de1-4a40-b32b-e3adf445ece4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.607808] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Lock "6a9e1c1d-8de1-4a40-b32b-e3adf445ece4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.772203] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Releasing lock "refresh_cache-53bae73f-2aec-41c4-bd62-aeedbf162258" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.773996] env[62109]: 
DEBUG nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 551.773996] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 551.774451] env[62109]: DEBUG oslo_concurrency.lockutils [req-26e488e0-7171-41e6-8c17-df147707bb73 req-2e432d0d-220f-4f08-83a3-cd615c0fc202 service nova] Acquired lock "refresh_cache-53bae73f-2aec-41c4-bd62-aeedbf162258" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.778524] env[62109]: DEBUG nova.network.neutron [req-26e488e0-7171-41e6-8c17-df147707bb73 req-2e432d0d-220f-4f08-83a3-cd615c0fc202 service nova] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Refreshing network info cache for port 789aa4a4-faf6-4751-a092-d070d022284b {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 551.778524] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e9a20368-0884-49d3-8f8b-02c9532a370f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.786626] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde746ae-0d60-4185-b576-a303100521b6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.813989] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 53bae73f-2aec-41c4-bd62-aeedbf162258 could not be found. [ 551.814193] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 551.814752] env[62109]: INFO nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Took 0.04 seconds to destroy the instance on the hypervisor. [ 551.815108] env[62109]: DEBUG oslo.service.loopingcall [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 551.815329] env[62109]: DEBUG nova.compute.manager [-] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 551.815577] env[62109]: DEBUG nova.network.neutron [-] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 551.862010] env[62109]: DEBUG nova.compute.manager [req-e8b44e1c-eec2-4f90-bd4e-319c33ea1614 req-f08fe25e-6fbf-443b-911a-c488c697cc5d service nova] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Received event network-changed-805389e0-22e2-4986-a49e-ca570e7c3a80 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 551.862127] env[62109]: DEBUG nova.compute.manager [req-e8b44e1c-eec2-4f90-bd4e-319c33ea1614 req-f08fe25e-6fbf-443b-911a-c488c697cc5d service nova] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Refreshing instance network info cache due to event network-changed-805389e0-22e2-4986-a49e-ca570e7c3a80. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 551.862913] env[62109]: DEBUG oslo_concurrency.lockutils [req-e8b44e1c-eec2-4f90-bd4e-319c33ea1614 req-f08fe25e-6fbf-443b-911a-c488c697cc5d service nova] Acquiring lock "refresh_cache-11f4e78d-12c7-4f93-8104-134d337ee6e0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.868968] env[62109]: DEBUG nova.network.neutron [-] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.944760] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116087, 'name': CreateVM_Task, 'duration_secs': 0.280753} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.945511] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13988400-7b35-4175-9410-84eff918111d] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 551.946904] env[62109]: DEBUG oslo_vmware.service [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e50a56-fdfc-4b87-9e4d-572a7ef1bda1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.953112] env[62109]: DEBUG oslo_concurrency.lockutils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.955041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.955041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 551.955041] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc8291a9-a66d-46be-b9e4-51f488ab0814 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.965021] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 551.965021] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52025068-dd33-f728-0d93-0f23e76b46d7" [ 551.965021] env[62109]: _type = "Task" [ 551.965021] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.972165] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52025068-dd33-f728-0d93-0f23e76b46d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.068530] env[62109]: DEBUG nova.compute.utils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 552.072449] env[62109]: DEBUG nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 552.073710] env[62109]: DEBUG nova.network.neutron [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 552.115231] env[62109]: DEBUG nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 552.180121] env[62109]: DEBUG nova.network.neutron [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 552.260587] env[62109]: DEBUG nova.policy [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dbf87d3d4db44df6a783c0468d25db02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '789393ef884f40f3bc4ba775b4c13a53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 552.338013] env[62109]: DEBUG nova.network.neutron [req-26e488e0-7171-41e6-8c17-df147707bb73 req-2e432d0d-220f-4f08-83a3-cd615c0fc202 service nova] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 552.374559] env[62109]: DEBUG nova.network.neutron [-] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.476805] env[62109]: DEBUG oslo_concurrency.lockutils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.476805] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 552.476805] env[62109]: DEBUG oslo_concurrency.lockutils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.476805] env[62109]: DEBUG oslo_concurrency.lockutils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.477104] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 552.477378] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-070c9e02-6f29-4ee4-b30d-bbf35fa88eeb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.497674] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 552.497855] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 552.498798] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70bd77e7-7a58-4040-9804-c671f44492c4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.506068] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0709055e-687d-4ccd-9557-71705dbcd4cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.512137] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 552.512137] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528de01b-0ab1-12d8-7a5e-436c0f702101" [ 552.512137] env[62109]: _type = "Task" [ 552.512137] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.523546] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528de01b-0ab1-12d8-7a5e-436c0f702101, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.573496] env[62109]: DEBUG nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 552.637728] env[62109]: DEBUG nova.network.neutron [req-26e488e0-7171-41e6-8c17-df147707bb73 req-2e432d0d-220f-4f08-83a3-cd615c0fc202 service nova] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.649528] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.652596] env[62109]: DEBUG nova.network.neutron [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.788268] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788f65b0-1ba4-4547-85bc-c5987c3198c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.796126] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de736e63-c76b-4dd6-97c9-b000bedc27ad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.835840] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c822e3-e4dc-48a8-b5f5-8db2a738a528 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.844079] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f738da13-a08e-44a4-a0ef-fc8e0e6b75d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.858864] env[62109]: DEBUG nova.compute.provider_tree [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.877829] env[62109]: INFO nova.compute.manager [-] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Took 1.06 seconds to deallocate network for instance. 
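The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines above (for "refresh_cache-<uuid>" and "compute_resources") all come from oslo.concurrency's named-lock helpers, which also emit the waited/held timings shown in the log. Below is a minimal sketch of that pattern; only the lock names are taken from the log, while the function names and bodies are placeholders, not Nova's actual code.

```python
# Hedged sketch of the oslo.concurrency pattern behind the lock lines above.
# Only the lock names mirror the log; everything else is illustrative.
from oslo_concurrency import lockutils


def refresh_instance_cache(instance_uuid):
    # lockutils.lock() returns a context manager around a named semaphore;
    # entering and leaving it produces the "acquired"/"released" DEBUG lines,
    # including the "waited"/"held" timings reported in the log.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # ... rebuild the instance's network info cache here ...
        pass


# Resource-tracker style: serialize claims behind one process-wide lock name.
@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # ... check and reserve VCPU / MEMORY_MB / DISK_GB for the instance ...
    pass
```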
[ 552.883438] env[62109]: DEBUG nova.compute.claims [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 552.883675] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.027568] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Preparing fetch location {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 553.027891] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Creating directory with path [datastore2] vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 553.028153] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-229d1cb4-080a-464d-8b95-0f9430e4f89e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.049957] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Created directory with path [datastore2] vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 553.049957] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Fetch image to [datastore2] vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 553.049957] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Downloading image file data 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 to [datastore2] vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk on the data store datastore2 {{(pid=62109) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 553.049957] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7fad0c-1dce-4a39-9023-1ad50a50127f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.059433] 
env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c07c15-b01d-4b96-ba1b-e6a36c00111c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.075115] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973bbb0c-61ea-47b9-ba77-1b9729c0e3a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.119382] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b87cbe-85b7-4e4c-a6ea-5df3d74d2a26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.127457] env[62109]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-07830441-88d1-40da-bcf2-3f761b95e235 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.145194] env[62109]: DEBUG oslo_concurrency.lockutils [req-26e488e0-7171-41e6-8c17-df147707bb73 req-2e432d0d-220f-4f08-83a3-cd615c0fc202 service nova] Releasing lock "refresh_cache-53bae73f-2aec-41c4-bd62-aeedbf162258" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.161599] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "refresh_cache-11f4e78d-12c7-4f93-8104-134d337ee6e0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.162184] env[62109]: DEBUG nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 553.162414] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 553.162956] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Downloading image file data 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 to the data store datastore2 {{(pid=62109) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 553.165066] env[62109]: DEBUG oslo_concurrency.lockutils [req-e8b44e1c-eec2-4f90-bd4e-319c33ea1614 req-f08fe25e-6fbf-443b-911a-c488c697cc5d service nova] Acquired lock "refresh_cache-11f4e78d-12c7-4f93-8104-134d337ee6e0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.166571] env[62109]: DEBUG nova.network.neutron [req-e8b44e1c-eec2-4f90-bd4e-319c33ea1614 req-f08fe25e-6fbf-443b-911a-c488c697cc5d service nova] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Refreshing network info cache for port 805389e0-22e2-4986-a49e-ca570e7c3a80 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 553.170033] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2cf197f3-8295-4402-a16f-5d77174a3380 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.178584] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b3d197-de9a-44e2-9b6a-8d00d0b8a345 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.207821] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 11f4e78d-12c7-4f93-8104-134d337ee6e0 could not be found. [ 553.207821] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 553.207821] env[62109]: INFO nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 553.207821] env[62109]: DEBUG oslo.service.loopingcall [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.207821] env[62109]: DEBUG nova.compute.manager [-] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 553.207821] env[62109]: DEBUG nova.network.neutron [-] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 553.254528] env[62109]: DEBUG oslo_vmware.rw_handles [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62109) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 553.361408] env[62109]: DEBUG nova.scheduler.client.report [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 553.433141] env[62109]: DEBUG nova.network.neutron [-] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.587960] env[62109]: DEBUG nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 553.618512] env[62109]: DEBUG nova.virt.hardware [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 553.619304] env[62109]: DEBUG nova.virt.hardware [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 553.619304] env[62109]: DEBUG nova.virt.hardware [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 553.619304] env[62109]: DEBUG nova.virt.hardware [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 553.619304] env[62109]: DEBUG nova.virt.hardware [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 553.620422] env[62109]: DEBUG nova.virt.hardware [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 553.620422] env[62109]: DEBUG nova.virt.hardware [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 553.620422] env[62109]: DEBUG nova.virt.hardware [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 553.620422] env[62109]: DEBUG nova.virt.hardware [None 
req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 553.620787] env[62109]: DEBUG nova.virt.hardware [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 553.620979] env[62109]: DEBUG nova.virt.hardware [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 553.622008] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24127556-9e06-471b-b589-f6e34eada05c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.634730] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cce239-6ded-4815-8485-905b24355c35 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.663263] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 553.669131] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 553.669426] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 553.670431] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Rebuilding the list of instances to heal {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 553.768856] env[62109]: DEBUG nova.network.neutron [req-e8b44e1c-eec2-4f90-bd4e-319c33ea1614 req-f08fe25e-6fbf-443b-911a-c488c697cc5d service nova] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.871024] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.310s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.871024] env[62109]: DEBUG nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 553.876102] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.249s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.877658] env[62109]: INFO nova.compute.claims [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 553.934284] env[62109]: DEBUG nova.network.neutron [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Successfully created port: a9554fbe-668e-4592-95ed-1228e9eabbe7 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 553.936900] env[62109]: DEBUG nova.network.neutron [-] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.956686] env[62109]: DEBUG oslo_vmware.rw_handles [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Completed reading data from the image iterator. {{(pid=62109) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 553.956883] env[62109]: DEBUG oslo_vmware.rw_handles [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 554.095381] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Downloaded image file data 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 to vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk on the data store datastore2 {{(pid=62109) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 554.096642] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Caching image {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 554.096642] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Copying Virtual Disk [datastore2] vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk to [datastore2] vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 554.096937] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e7a5dfc-7158-43a4-84a8-08bc310fefa4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.108356] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 554.108356] env[62109]: value = "task-1116088" [ 554.108356] env[62109]: _type = "Task" [ 554.108356] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.122020] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116088, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.174751] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 554.174751] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 554.174751] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 13988400-7b35-4175-9410-84eff918111d] Skipping network cache update for instance because it is Building. 
{{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 554.175116] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 554.175116] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 554.175116] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Didn't find any instances for network info cache update. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 554.176236] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.176236] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.176236] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.179438] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.179720] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.180989] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.180989] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 554.180989] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.382547] env[62109]: DEBUG nova.compute.utils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 554.385463] env[62109]: DEBUG nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 554.385790] env[62109]: DEBUG nova.network.neutron [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 554.445750] env[62109]: INFO nova.compute.manager [-] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Took 1.24 seconds to deallocate network for instance. [ 554.449159] env[62109]: DEBUG nova.compute.claims [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 554.449320] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.458595] env[62109]: DEBUG nova.network.neutron [req-e8b44e1c-eec2-4f90-bd4e-319c33ea1614 req-f08fe25e-6fbf-443b-911a-c488c697cc5d service nova] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.619802] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116088, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.685080] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.695217] env[62109]: DEBUG nova.compute.manager [req-97530849-ca0f-4047-89ae-095902a98f1a req-b3b68075-ee11-4ab5-a151-29dcd6f66ff7 service nova] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Received event network-vif-deleted-789aa4a4-faf6-4751-a092-d070d022284b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 554.867574] env[62109]: DEBUG nova.policy [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4e35b8cfd3c4e4887789dedd8bed229', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '09f1016066de455ea344581fd6885316', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 554.888689] env[62109]: DEBUG nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 554.940598] env[62109]: DEBUG nova.compute.manager [req-fb6d422e-a0e5-4c62-9d96-27cb4bd127e1 req-9edd5228-91d2-48ef-b7ab-8c303829952f service nova] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Received event network-vif-deleted-805389e0-22e2-4986-a49e-ca570e7c3a80 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 554.965090] env[62109]: DEBUG oslo_concurrency.lockutils [req-e8b44e1c-eec2-4f90-bd4e-319c33ea1614 req-f08fe25e-6fbf-443b-911a-c488c697cc5d service nova] Releasing lock "refresh_cache-11f4e78d-12c7-4f93-8104-134d337ee6e0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.127715] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116088, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.656233} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.127715] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Copied Virtual Disk [datastore2] vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk to [datastore2] vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 555.127715] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Deleting the datastore file [datastore2] vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 555.127715] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf50856e-4654-4e1d-8a3a-85baa4cd1f60 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.133841] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 555.133841] env[62109]: value = "task-1116089" [ 555.133841] env[62109]: _type = "Task" [ 555.133841] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.139141] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910f21e0-4262-4aea-9fd6-07629b799c3d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.148705] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116089, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.156554] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d96da9-2505-49b9-95d7-ba1bfa3c557a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.190579] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61979398-51a5-4bb6-bca8-5e816cc7ed3b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.198405] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bcca213-83d1-494a-bb86-44db1098d807 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.215055] env[62109]: DEBUG nova.compute.provider_tree [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.646825] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023164} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.647114] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 555.647408] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Moving file from [datastore2] vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 to [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8. {{(pid=62109) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 555.647625] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-f3ba90ae-2a45-4798-b10e-7a7d97ba8e7e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.656530] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 555.656530] env[62109]: value = "task-1116090" [ 555.656530] env[62109]: _type = "Task" [ 555.656530] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.666581] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116090, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.717728] env[62109]: DEBUG nova.scheduler.client.report [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 555.908639] env[62109]: DEBUG nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 555.937496] env[62109]: DEBUG nova.virt.hardware [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 555.937496] env[62109]: DEBUG nova.virt.hardware [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 555.937496] env[62109]: DEBUG nova.virt.hardware [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 555.937737] env[62109]: DEBUG nova.virt.hardware [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 555.937737] env[62109]: DEBUG nova.virt.hardware [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} 
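The task entries above (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, MoveDatastoreFile_Task, with their "progress is 0%/100%" polling and "completed successfully" results) follow oslo.vmware's invoke-then-wait pattern. The sketch below shows that pattern under stated assumptions: the vCenter host, credentials and call arguments are placeholders, only the datastore file names echo the log, and this is not Nova's vm_util/ds_util code.

```python
# Hedged sketch of the oslo.vmware task flow seen above: invoke an API method
# that returns a Task managed object, then let the session poll it to completion.
from oslo_vmware import api as vmware_api

# Placeholder endpoint and credentials (assumptions, not values from this log).
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'password',
    api_retry_count=10, task_poll_interval=0.5)

disk_mgr = session.vim.service_content.virtualDiskManager

# Kick off the copy; the real caller also passes datacenter references and a
# disk spec, omitted here for brevity.
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] vmware_temp/<tmp-dir>/<image>/tmp-sparse.vmdk',
    destName='[datastore2] devstack-image-cache_base/<image>/<image>.vmdk')

# wait_for_task() polls the Task object (the "progress is ..." lines above) and
# returns its result on success, or raises if the task errors out.
result = session.wait_for_task(task)
```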
[ 555.937737] env[62109]: DEBUG nova.virt.hardware [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 555.937894] env[62109]: DEBUG nova.virt.hardware [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 555.938113] env[62109]: DEBUG nova.virt.hardware [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 555.938306] env[62109]: DEBUG nova.virt.hardware [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 555.938471] env[62109]: DEBUG nova.virt.hardware [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 555.938986] env[62109]: DEBUG nova.virt.hardware [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 555.939519] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3744ab5-78a3-43c0-a1ca-8be0ddecab2b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.947683] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730e2bd9-4fb1-42c6-af76-1b6901d413dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.166948] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116090, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.025726} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.166948] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] File moved {{(pid=62109) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 556.166948] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Cleaning up location [datastore2] vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 556.166948] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Deleting the datastore file [datastore2] vmware_temp/9c53efe2-b4c9-45eb-bbba-2aeb43f59078 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 556.167368] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b625753b-0560-490a-8966-7c290e15656d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.175025] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 556.175025] env[62109]: value = "task-1116091" [ 556.175025] env[62109]: _type = "Task" [ 556.175025] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.184988] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116091, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.222111] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.224987] env[62109]: DEBUG nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 556.225760] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.691s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.227606] env[62109]: INFO nova.compute.claims [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 556.687674] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.027426} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.687952] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 556.688692] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e58d02e8-9c78-44a5-a4c2-20d485139800 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.694572] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 556.694572] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5259797f-9e5b-dc94-01be-455d4b1f7f81" [ 556.694572] env[62109]: _type = "Task" [ 556.694572] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.705915] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5259797f-9e5b-dc94-01be-455d4b1f7f81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.734906] env[62109]: DEBUG nova.compute.utils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 556.741249] env[62109]: DEBUG nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 556.741661] env[62109]: DEBUG nova.network.neutron [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 557.214097] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5259797f-9e5b-dc94-01be-455d4b1f7f81, 'name': SearchDatastore_Task, 'duration_secs': 0.03133} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.214097] env[62109]: DEBUG oslo_concurrency.lockutils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.214097] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 13988400-7b35-4175-9410-84eff918111d/13988400-7b35-4175-9410-84eff918111d.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 557.214097] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-132a0414-d976-4a3c-ac8a-08c3ac727e41 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.223819] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 557.223819] env[62109]: value = "task-1116092" [ 557.223819] env[62109]: _type = "Task" [ 557.223819] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.235045] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116092, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.246094] env[62109]: DEBUG nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 557.377007] env[62109]: DEBUG nova.network.neutron [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Successfully created port: 6eb0afc7-671f-4f09-86b2-4de081c5656d {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 557.385832] env[62109]: DEBUG nova.policy [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3b76c28026ee45818e5057f8c5a85525', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '978facd2f87648c886aa728ab575b492', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 557.429699] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb76e4d-9a2e-4906-8afd-fbd0bae98cf7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.439705] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b1e44f-00cd-48e9-8359-55782f50019e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.475614] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118d856f-ee2b-4d76-b49f-3535c10ac6d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.483839] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d887ede4-1bb5-4f1f-a5da-2df6ec367a8a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.498938] env[62109]: DEBUG nova.compute.provider_tree [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 557.739490] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116092, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.002218] env[62109]: DEBUG nova.scheduler.client.report [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 558.243154] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116092, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.812209} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.243298] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 13988400-7b35-4175-9410-84eff918111d/13988400-7b35-4175-9410-84eff918111d.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 558.243586] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 558.243905] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-953349ea-2350-4099-8f61-3f30d074f121 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.254307] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 558.254307] env[62109]: value = "task-1116093" [ 558.254307] env[62109]: _type = "Task" [ 558.254307] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.261032] env[62109]: DEBUG nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 558.270905] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116093, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.307054] env[62109]: DEBUG nova.virt.hardware [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 558.307054] env[62109]: DEBUG nova.virt.hardware [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 558.307216] env[62109]: DEBUG nova.virt.hardware [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 558.307632] env[62109]: DEBUG nova.virt.hardware [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 558.307632] env[62109]: DEBUG nova.virt.hardware [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 558.307756] env[62109]: DEBUG nova.virt.hardware [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 558.308049] env[62109]: DEBUG nova.virt.hardware [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 558.308287] env[62109]: DEBUG nova.virt.hardware [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 558.308497] env[62109]: DEBUG nova.virt.hardware [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 558.309098] env[62109]: DEBUG nova.virt.hardware [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 558.309098] env[62109]: DEBUG nova.virt.hardware [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 558.310148] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08dbac59-5a28-4bc0-b064-c5992360aa00 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.319422] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b45688-aa29-4aee-a1dc-223a6958f4e7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.382366] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Acquiring lock "41eeecaa-8514-4550-989e-43db00dff6bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.383308] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Lock "41eeecaa-8514-4550-989e-43db00dff6bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.518637] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.293s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.519387] env[62109]: DEBUG nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 
tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 558.530204] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.880s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.536037] env[62109]: INFO nova.compute.claims [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 558.776270] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116093, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062129} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.777249] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 558.779561] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656aa5ca-4ab2-4169-a07b-8952497d5861 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.820231] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 13988400-7b35-4175-9410-84eff918111d/13988400-7b35-4175-9410-84eff918111d.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 558.820558] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f159774-c460-41b7-b300-c59558dd9773 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.846530] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 558.846530] env[62109]: value = "task-1116094" [ 558.846530] env[62109]: _type = "Task" [ 558.846530] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.859819] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116094, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.888139] env[62109]: DEBUG nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 559.033800] env[62109]: DEBUG nova.compute.utils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 559.033800] env[62109]: DEBUG nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 559.036264] env[62109]: DEBUG nova.network.neutron [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 559.206587] env[62109]: DEBUG nova.policy [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa653ce0ca3644ea953b5bf9e6807319', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fb27ce582cb4437860f06c3ea5b5c70', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 559.361170] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116094, 'name': ReconfigVM_Task, 'duration_secs': 0.302339} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.361170] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 13988400-7b35-4175-9410-84eff918111d/13988400-7b35-4175-9410-84eff918111d.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 559.361493] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f20ee3b-23f1-4839-a324-7a274970edfa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.369836] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 559.369836] env[62109]: value = "task-1116095" [ 559.369836] env[62109]: _type = "Task" [ 559.369836] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.383018] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116095, 'name': Rename_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.419275] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.540521] env[62109]: DEBUG nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 559.862633] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2704d7c-11ad-4c5d-91e9-b772458e2ad0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.883932] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbda59b2-fa74-4aed-86fd-e49325048fc1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.892920] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116095, 'name': Rename_Task, 'duration_secs': 0.161588} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.926360] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 559.926360] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-556cbcdf-f9b1-464e-97ac-06ec2964317a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.928892] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871ed07d-1bfb-4456-9de0-8dccd11f1b46 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.941543] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b580f7a3-d480-412b-b7b2-27bea368dac6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.947249] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 559.947249] env[62109]: value = "task-1116100" [ 559.947249] env[62109]: _type = "Task" [ 559.947249] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.966332] env[62109]: DEBUG nova.compute.provider_tree [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.976119] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116100, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.193914] env[62109]: DEBUG nova.network.neutron [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Successfully created port: c8e5852b-f51f-4721-bedd-abcab4c157ad {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 560.230282] env[62109]: ERROR nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a9554fbe-668e-4592-95ed-1228e9eabbe7, please check neutron logs for more information. 
[ 560.230282] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 560.230282] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 560.230282] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 560.230282] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 560.230282] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 560.230282] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 560.230282] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 560.230282] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 560.230282] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 560.230282] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 560.230282] env[62109]: ERROR nova.compute.manager raise self.value [ 560.230282] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 560.230282] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 560.230282] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 560.230282] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 560.231113] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 560.231113] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 560.231113] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a9554fbe-668e-4592-95ed-1228e9eabbe7, please check neutron logs for more information. 
[ 560.231113] env[62109]: ERROR nova.compute.manager [ 560.231113] env[62109]: Traceback (most recent call last): [ 560.231113] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 560.231113] env[62109]: listener.cb(fileno) [ 560.231113] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 560.231113] env[62109]: result = function(*args, **kwargs) [ 560.231113] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 560.231113] env[62109]: return func(*args, **kwargs) [ 560.231113] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 560.231113] env[62109]: raise e [ 560.231113] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 560.231113] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 560.231113] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 560.231113] env[62109]: created_port_ids = self._update_ports_for_instance( [ 560.231113] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 560.231113] env[62109]: with excutils.save_and_reraise_exception(): [ 560.231113] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 560.231113] env[62109]: self.force_reraise() [ 560.231113] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 560.231113] env[62109]: raise self.value [ 560.231113] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 560.231113] env[62109]: updated_port = self._update_port( [ 560.231113] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 560.231113] env[62109]: _ensure_no_port_binding_failure(port) [ 560.231113] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 560.231113] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 560.231925] env[62109]: nova.exception.PortBindingFailed: Binding failed for port a9554fbe-668e-4592-95ed-1228e9eabbe7, please check neutron logs for more information. [ 560.231925] env[62109]: Removing descriptor: 16 [ 560.231925] env[62109]: ERROR nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a9554fbe-668e-4592-95ed-1228e9eabbe7, please check neutron logs for more information. 
[ 560.231925] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Traceback (most recent call last): [ 560.231925] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 560.231925] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] yield resources [ 560.231925] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 560.231925] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] self.driver.spawn(context, instance, image_meta, [ 560.231925] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 560.231925] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] self._vmops.spawn(context, instance, image_meta, injected_files, [ 560.231925] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 560.231925] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] vm_ref = self.build_virtual_machine(instance, [ 560.232293] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 560.232293] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] vif_infos = vmwarevif.get_vif_info(self._session, [ 560.232293] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 560.232293] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] for vif in network_info: [ 560.232293] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 560.232293] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] return self._sync_wrapper(fn, *args, **kwargs) [ 560.232293] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 560.232293] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] self.wait() [ 560.232293] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 560.232293] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] self[:] = self._gt.wait() [ 560.232293] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 560.232293] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] return self._exit_event.wait() [ 560.232293] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 560.232652] env[62109]: ERROR 
nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] result = hub.switch() [ 560.232652] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 560.232652] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] return self.greenlet.switch() [ 560.232652] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 560.232652] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] result = function(*args, **kwargs) [ 560.232652] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 560.232652] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] return func(*args, **kwargs) [ 560.232652] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 560.232652] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] raise e [ 560.232652] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 560.232652] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] nwinfo = self.network_api.allocate_for_instance( [ 560.232652] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 560.232652] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] created_port_ids = self._update_ports_for_instance( [ 560.233060] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 560.233060] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] with excutils.save_and_reraise_exception(): [ 560.233060] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 560.233060] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] self.force_reraise() [ 560.233060] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 560.233060] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] raise self.value [ 560.233060] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 560.233060] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] updated_port = self._update_port( [ 560.233060] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 560.233060] 
env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] _ensure_no_port_binding_failure(port) [ 560.233060] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 560.233060] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] raise exception.PortBindingFailed(port_id=port['id']) [ 560.233643] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] nova.exception.PortBindingFailed: Binding failed for port a9554fbe-668e-4592-95ed-1228e9eabbe7, please check neutron logs for more information. [ 560.233643] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] [ 560.233643] env[62109]: INFO nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Terminating instance [ 560.235753] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Acquiring lock "refresh_cache-892142c4-8d74-4c27-95ed-2edc07def573" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.235913] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Acquired lock "refresh_cache-892142c4-8d74-4c27-95ed-2edc07def573" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.236090] env[62109]: DEBUG nova.network.neutron [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 560.338944] env[62109]: DEBUG nova.network.neutron [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Successfully created port: 186a3613-a077-4a81-8822-8c8b454bd666 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 560.461156] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116100, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.474032] env[62109]: DEBUG nova.scheduler.client.report [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 560.554421] env[62109]: DEBUG nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 560.597334] env[62109]: DEBUG nova.virt.hardware [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 560.597850] env[62109]: DEBUG nova.virt.hardware [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 560.599316] env[62109]: DEBUG nova.virt.hardware [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 560.599316] env[62109]: DEBUG nova.virt.hardware [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 560.599316] env[62109]: DEBUG nova.virt.hardware [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Image pref 0:0:0 {{(pid=62109) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 560.599316] env[62109]: DEBUG nova.virt.hardware [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 560.600703] env[62109]: DEBUG nova.virt.hardware [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 560.600703] env[62109]: DEBUG nova.virt.hardware [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 560.600703] env[62109]: DEBUG nova.virt.hardware [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 560.600703] env[62109]: DEBUG nova.virt.hardware [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 560.600703] env[62109]: DEBUG nova.virt.hardware [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 560.601526] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50128dd-c45c-455d-af9f-51c499ce08c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.611649] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf16c3d-51ec-413c-bdc6-5275a8becf87 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.853553] env[62109]: DEBUG nova.network.neutron [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 560.960212] env[62109]: DEBUG oslo_vmware.api [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116100, 'name': PowerOnVM_Task, 'duration_secs': 0.531882} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.960526] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 560.960707] env[62109]: INFO nova.compute.manager [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Took 9.65 seconds to spawn the instance on the hypervisor. [ 560.961246] env[62109]: DEBUG nova.compute.manager [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 560.962343] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ffa90b-66ef-4103-acf4-b970ba90f689 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.978179] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.449s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.978608] env[62109]: DEBUG nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 560.981947] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 8.098s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.034784] env[62109]: DEBUG nova.network.neutron [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.044603] env[62109]: ERROR nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6eb0afc7-671f-4f09-86b2-4de081c5656d, please check neutron logs for more information. 
[ 561.044603] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 561.044603] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 561.044603] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 561.044603] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 561.044603] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 561.044603] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 561.044603] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 561.044603] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.044603] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 561.044603] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.044603] env[62109]: ERROR nova.compute.manager raise self.value [ 561.044603] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 561.044603] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 561.044603] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.044603] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 561.045105] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.045105] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 561.045105] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6eb0afc7-671f-4f09-86b2-4de081c5656d, please check neutron logs for more information. 
[ 561.045105] env[62109]: ERROR nova.compute.manager [ 561.045105] env[62109]: Traceback (most recent call last): [ 561.045105] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 561.045105] env[62109]: listener.cb(fileno) [ 561.045105] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.045105] env[62109]: result = function(*args, **kwargs) [ 561.045105] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 561.045105] env[62109]: return func(*args, **kwargs) [ 561.045105] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 561.045105] env[62109]: raise e [ 561.045105] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 561.045105] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 561.045105] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 561.045105] env[62109]: created_port_ids = self._update_ports_for_instance( [ 561.045105] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 561.045105] env[62109]: with excutils.save_and_reraise_exception(): [ 561.045105] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.045105] env[62109]: self.force_reraise() [ 561.045105] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.045105] env[62109]: raise self.value [ 561.045105] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 561.045105] env[62109]: updated_port = self._update_port( [ 561.045105] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.045105] env[62109]: _ensure_no_port_binding_failure(port) [ 561.045105] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.045105] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 561.045994] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 6eb0afc7-671f-4f09-86b2-4de081c5656d, please check neutron logs for more information. [ 561.045994] env[62109]: Removing descriptor: 18 [ 561.045994] env[62109]: ERROR nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6eb0afc7-671f-4f09-86b2-4de081c5656d, please check neutron logs for more information. 
[ 561.045994] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Traceback (most recent call last): [ 561.045994] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 561.045994] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] yield resources [ 561.045994] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 561.045994] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] self.driver.spawn(context, instance, image_meta, [ 561.045994] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 561.045994] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] self._vmops.spawn(context, instance, image_meta, injected_files, [ 561.045994] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 561.045994] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] vm_ref = self.build_virtual_machine(instance, [ 561.046356] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 561.046356] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] vif_infos = vmwarevif.get_vif_info(self._session, [ 561.046356] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 561.046356] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] for vif in network_info: [ 561.046356] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 561.046356] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] return self._sync_wrapper(fn, *args, **kwargs) [ 561.046356] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 561.046356] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] self.wait() [ 561.046356] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 561.046356] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] self[:] = self._gt.wait() [ 561.046356] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 561.046356] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] return self._exit_event.wait() [ 561.046356] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 561.046740] env[62109]: ERROR 
nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] result = hub.switch() [ 561.046740] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 561.046740] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] return self.greenlet.switch() [ 561.046740] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.046740] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] result = function(*args, **kwargs) [ 561.046740] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 561.046740] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] return func(*args, **kwargs) [ 561.046740] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 561.046740] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] raise e [ 561.046740] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 561.046740] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] nwinfo = self.network_api.allocate_for_instance( [ 561.046740] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 561.046740] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] created_port_ids = self._update_ports_for_instance( [ 561.047072] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 561.047072] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] with excutils.save_and_reraise_exception(): [ 561.047072] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.047072] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] self.force_reraise() [ 561.047072] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.047072] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] raise self.value [ 561.047072] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 561.047072] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] updated_port = self._update_port( [ 561.047072] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.047072] 
env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] _ensure_no_port_binding_failure(port) [ 561.047072] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.047072] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] raise exception.PortBindingFailed(port_id=port['id']) [ 561.047374] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] nova.exception.PortBindingFailed: Binding failed for port 6eb0afc7-671f-4f09-86b2-4de081c5656d, please check neutron logs for more information. [ 561.047374] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] [ 561.047374] env[62109]: INFO nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Terminating instance [ 561.048310] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Acquiring lock "refresh_cache-a10c8e2e-9b5c-498e-81dc-ca69af0ff123" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.048464] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Acquired lock "refresh_cache-a10c8e2e-9b5c-498e-81dc-ca69af0ff123" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.048731] env[62109]: DEBUG nova.network.neutron [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 561.485237] env[62109]: DEBUG nova.compute.utils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 561.494181] env[62109]: DEBUG nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 561.494460] env[62109]: DEBUG nova.network.neutron [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 561.504175] env[62109]: INFO nova.compute.manager [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Took 17.69 seconds to build instance. [ 561.540274] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Releasing lock "refresh_cache-892142c4-8d74-4c27-95ed-2edc07def573" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.540274] env[62109]: DEBUG nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 561.540274] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 561.540274] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c4de3ea9-9a93-4d8c-b1a0-5cd21a57e0c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.558051] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffe9f5d-68b2-4fcd-91e6-055d3bc78ba2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.588325] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 892142c4-8d74-4c27-95ed-2edc07def573 could not be found. [ 561.588562] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 561.589086] env[62109]: INFO nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Took 0.05 seconds to destroy the instance on the hypervisor. 
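All of the PortBindingFailed tracebacks above terminate in nova/network/neutron.py line 294, where _ensure_no_port_binding_failure raises the exception carrying the "Binding failed for port ..., please check neutron logs" message. A minimal, self-contained sketch of that check, assuming the usual guard on the port's binding:vif_type (the 'binding_failed' constant and the guard are assumptions about standard Nova behaviour, not something this log states; only the message mirrors what is logged):

    # Sketch of the check the tracebacks above end in. The constant and guard are
    # assumptions about standard Nova behaviour; only the exception message is
    # taken from the log text.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, please check neutron "
                f"logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding through the port's binding:vif_type.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example with a port id seen above; the call raises, which is the point.
    try:
        _ensure_no_port_binding_failure(
            {'id': 'a9554fbe-668e-4592-95ed-1228e9eabbe7',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)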
[ 561.589086] env[62109]: DEBUG oslo.service.loopingcall [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 561.589209] env[62109]: DEBUG nova.compute.manager [-] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 561.589292] env[62109]: DEBUG nova.network.neutron [-] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 561.648785] env[62109]: DEBUG nova.network.neutron [-] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.661287] env[62109]: DEBUG nova.network.neutron [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.729067] env[62109]: DEBUG nova.policy [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9705c72ac154ae7b4d47282010d5a66', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc5b149cd1a949d28310e8076f854cf2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 561.737174] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d77927-c083-4850-ad41-eae71c32fe51 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.745579] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396ffe33-91fc-4bf3-a0f4-ec38b56a55e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.785371] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d6fc5a-366a-4c0d-8faf-614f2e9a18da {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.795461] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96117326-fc9b-41e7-af10-beb3bef7430a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.810725] env[62109]: DEBUG nova.compute.provider_tree [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Inventory has not changed in 
ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.829891] env[62109]: DEBUG nova.network.neutron [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.842391] env[62109]: DEBUG nova.compute.manager [req-f17379bf-813f-412c-809a-6ff4bd0204a6 req-f397d163-123d-49d6-be62-3baa1b954302 service nova] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Received event network-changed-a9554fbe-668e-4592-95ed-1228e9eabbe7 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 561.842669] env[62109]: DEBUG nova.compute.manager [req-f17379bf-813f-412c-809a-6ff4bd0204a6 req-f397d163-123d-49d6-be62-3baa1b954302 service nova] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Refreshing instance network info cache due to event network-changed-a9554fbe-668e-4592-95ed-1228e9eabbe7. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 561.842881] env[62109]: DEBUG oslo_concurrency.lockutils [req-f17379bf-813f-412c-809a-6ff4bd0204a6 req-f397d163-123d-49d6-be62-3baa1b954302 service nova] Acquiring lock "refresh_cache-892142c4-8d74-4c27-95ed-2edc07def573" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.843026] env[62109]: DEBUG oslo_concurrency.lockutils [req-f17379bf-813f-412c-809a-6ff4bd0204a6 req-f397d163-123d-49d6-be62-3baa1b954302 service nova] Acquired lock "refresh_cache-892142c4-8d74-4c27-95ed-2edc07def573" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.843183] env[62109]: DEBUG nova.network.neutron [req-f17379bf-813f-412c-809a-6ff4bd0204a6 req-f397d163-123d-49d6-be62-3baa1b954302 service nova] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Refreshing network info cache for port a9554fbe-668e-4592-95ed-1228e9eabbe7 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 562.008852] env[62109]: DEBUG nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 562.015830] env[62109]: DEBUG oslo_concurrency.lockutils [None req-504e4e1c-f19b-45d6-9893-3d3345cea737 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Lock "13988400-7b35-4175-9410-84eff918111d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.205s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.015830] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "13988400-7b35-4175-9410-84eff918111d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 14.372s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.015830] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3684d690-2a0e-494d-b151-471f76eb6778 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.044165] env[62109]: ERROR nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 186a3613-a077-4a81-8822-8c8b454bd666, please check neutron logs for more information. [ 562.044165] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 562.044165] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 562.044165] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 562.044165] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 562.044165] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 562.044165] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 562.044165] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 562.044165] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 562.044165] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 562.044165] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 562.044165] env[62109]: ERROR nova.compute.manager raise self.value [ 562.044165] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 562.044165] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 562.044165] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 562.044165] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 562.044697] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 562.044697] env[62109]: ERROR 
nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 562.044697] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 186a3613-a077-4a81-8822-8c8b454bd666, please check neutron logs for more information. [ 562.044697] env[62109]: ERROR nova.compute.manager [ 562.044697] env[62109]: Traceback (most recent call last): [ 562.044697] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 562.044697] env[62109]: listener.cb(fileno) [ 562.044697] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 562.044697] env[62109]: result = function(*args, **kwargs) [ 562.044697] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 562.044697] env[62109]: return func(*args, **kwargs) [ 562.044697] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 562.044697] env[62109]: raise e [ 562.044697] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 562.044697] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 562.044697] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 562.044697] env[62109]: created_port_ids = self._update_ports_for_instance( [ 562.044697] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 562.044697] env[62109]: with excutils.save_and_reraise_exception(): [ 562.044697] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 562.044697] env[62109]: self.force_reraise() [ 562.044697] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 562.044697] env[62109]: raise self.value [ 562.044697] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 562.044697] env[62109]: updated_port = self._update_port( [ 562.044697] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 562.044697] env[62109]: _ensure_no_port_binding_failure(port) [ 562.044697] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 562.044697] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 562.045545] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 186a3613-a077-4a81-8822-8c8b454bd666, please check neutron logs for more information. [ 562.045545] env[62109]: Removing descriptor: 19 [ 562.045545] env[62109]: ERROR nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 186a3613-a077-4a81-8822-8c8b454bd666, please check neutron logs for more information. 
[ 562.045545] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Traceback (most recent call last): [ 562.045545] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 562.045545] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] yield resources [ 562.045545] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 562.045545] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] self.driver.spawn(context, instance, image_meta, [ 562.045545] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 562.045545] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 562.045545] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 562.045545] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] vm_ref = self.build_virtual_machine(instance, [ 562.045912] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 562.045912] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] vif_infos = vmwarevif.get_vif_info(self._session, [ 562.045912] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 562.045912] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] for vif in network_info: [ 562.045912] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 562.045912] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] return self._sync_wrapper(fn, *args, **kwargs) [ 562.045912] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 562.045912] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] self.wait() [ 562.045912] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 562.045912] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] self[:] = self._gt.wait() [ 562.045912] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 562.045912] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] return self._exit_event.wait() [ 562.045912] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 562.046298] env[62109]: ERROR 
nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] result = hub.switch() [ 562.046298] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 562.046298] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] return self.greenlet.switch() [ 562.046298] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 562.046298] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] result = function(*args, **kwargs) [ 562.046298] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 562.046298] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] return func(*args, **kwargs) [ 562.046298] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 562.046298] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] raise e [ 562.046298] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 562.046298] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] nwinfo = self.network_api.allocate_for_instance( [ 562.046298] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 562.046298] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] created_port_ids = self._update_ports_for_instance( [ 562.046681] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 562.046681] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] with excutils.save_and_reraise_exception(): [ 562.046681] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 562.046681] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] self.force_reraise() [ 562.046681] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 562.046681] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] raise self.value [ 562.046681] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 562.046681] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] updated_port = self._update_port( [ 562.046681] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 562.046681] 
env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] _ensure_no_port_binding_failure(port) [ 562.046681] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 562.046681] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] raise exception.PortBindingFailed(port_id=port['id']) [ 562.051014] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] nova.exception.PortBindingFailed: Binding failed for port 186a3613-a077-4a81-8822-8c8b454bd666, please check neutron logs for more information. [ 562.051014] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] [ 562.051014] env[62109]: INFO nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Terminating instance [ 562.051014] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Acquiring lock "refresh_cache-b52ff4f8-2341-493e-8587-b8d1d12efb7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.051014] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Acquired lock "refresh_cache-b52ff4f8-2341-493e-8587-b8d1d12efb7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.051014] env[62109]: DEBUG nova.network.neutron [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 562.153211] env[62109]: DEBUG nova.network.neutron [-] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.212122] env[62109]: DEBUG nova.network.neutron [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Successfully created port: 29546bda-6d6d-4096-ad62-678eeaff25ce {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 562.221974] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Acquiring lock "18d23737-e0e7-44c9-887e-6531acf496fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.222314] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 
tempest-InstanceActionsTestJSON-1226643893-project-member] Lock "18d23737-e0e7-44c9-887e-6531acf496fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.318327] env[62109]: DEBUG nova.scheduler.client.report [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 562.338321] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Releasing lock "refresh_cache-a10c8e2e-9b5c-498e-81dc-ca69af0ff123" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.338754] env[62109]: DEBUG nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 562.338969] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 562.339515] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24be6657-1f5c-452c-927a-b3b3c608324f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.350649] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ff3711-b10c-4664-aec2-9b2a74daa650 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.373208] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a10c8e2e-9b5c-498e-81dc-ca69af0ff123 could not be found. 
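The provider inventory reported unchanged above (and earlier at 560.474032) implies the schedulable capacity via the usual Placement rule capacity = (total - reserved) * allocation_ratio, with max_unit bounding any single allocation. A small sketch of that arithmetic, using the figures from the log (the rule itself is assumed standard Placement behaviour rather than stated here):

    # Capacity implied by the inventory logged for provider
    # 574e9717-c25e-453d-8028-45d9e2f95398 in the records above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 170},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable={capacity:.0f}, largest single allocation={inv['max_unit']}")

    # VCPU: schedulable=192, largest single allocation=16
    # MEMORY_MB: schedulable=196078, largest single allocation=65530
    # DISK_GB: schedulable=400, largest single allocation=170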
[ 562.373508] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 562.373689] env[62109]: INFO nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Took 0.03 seconds to destroy the instance on the hypervisor. [ 562.373981] env[62109]: DEBUG oslo.service.loopingcall [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 562.374220] env[62109]: DEBUG nova.compute.manager [-] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 562.374315] env[62109]: DEBUG nova.network.neutron [-] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 562.390299] env[62109]: DEBUG nova.network.neutron [req-f17379bf-813f-412c-809a-6ff4bd0204a6 req-f397d163-123d-49d6-be62-3baa1b954302 service nova] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 562.408027] env[62109]: DEBUG nova.network.neutron [-] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 562.524478] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "13988400-7b35-4175-9410-84eff918111d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.511s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.613595] env[62109]: DEBUG nova.network.neutron [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 562.621861] env[62109]: DEBUG nova.network.neutron [req-f17379bf-813f-412c-809a-6ff4bd0204a6 req-f397d163-123d-49d6-be62-3baa1b954302 service nova] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.656848] env[62109]: INFO nova.compute.manager [-] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Took 1.07 seconds to deallocate network for instance. 
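The recurring Acquiring lock / acquired ... waited Ns / released ... held Ns triples above (for "compute_resources", the per-instance UUID locks, and the refresh_cache-* locks) are emitted by oslo.concurrency's lock wrapper in lockutils.py. A minimal sketch of the pattern that produces them, with an illustrative function name rather than Nova's actual code:

    # Illustrative oslo.concurrency locking; the decorated function is made up for
    # the example. In the log, "waited" is time blocked on another holder and
    # "held" is time spent inside the protected body.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_tracker():
        pass  # work done while the lock is held

    # The same primitive can also be taken explicitly as a context manager,
    # as the refresh_cache-<instance uuid> locks above are:
    with lockutils.lock('refresh_cache-892142c4-8d74-4c27-95ed-2edc07def573'):
        pass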
[ 562.660486] env[62109]: DEBUG nova.compute.claims [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 562.660486] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.720241] env[62109]: DEBUG nova.network.neutron [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.728470] env[62109]: DEBUG nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 562.830676] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.847s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.830676] env[62109]: ERROR nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 789aa4a4-faf6-4751-a092-d070d022284b, please check neutron logs for more information. 
[ 562.830676] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Traceback (most recent call last): [ 562.830676] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 562.830676] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] self.driver.spawn(context, instance, image_meta, [ 562.830676] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 562.830676] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] self._vmops.spawn(context, instance, image_meta, injected_files, [ 562.830676] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 562.830676] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] vm_ref = self.build_virtual_machine(instance, [ 562.831050] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 562.831050] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] vif_infos = vmwarevif.get_vif_info(self._session, [ 562.831050] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 562.831050] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] for vif in network_info: [ 562.831050] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 562.831050] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] return self._sync_wrapper(fn, *args, **kwargs) [ 562.831050] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 562.831050] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] self.wait() [ 562.831050] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 562.831050] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] self[:] = self._gt.wait() [ 562.831050] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 562.831050] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] return self._exit_event.wait() [ 562.831050] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 562.831515] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] result = hub.switch() [ 562.831515] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
562.831515] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] return self.greenlet.switch() [ 562.831515] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 562.831515] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] result = function(*args, **kwargs) [ 562.831515] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 562.831515] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] return func(*args, **kwargs) [ 562.831515] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 562.831515] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] raise e [ 562.831515] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 562.831515] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] nwinfo = self.network_api.allocate_for_instance( [ 562.831515] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 562.831515] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] created_port_ids = self._update_ports_for_instance( [ 562.831882] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 562.831882] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] with excutils.save_and_reraise_exception(): [ 562.831882] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 562.831882] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] self.force_reraise() [ 562.831882] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 562.831882] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] raise self.value [ 562.831882] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 562.831882] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] updated_port = self._update_port( [ 562.831882] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 562.831882] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] _ensure_no_port_binding_failure(port) [ 562.831882] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 562.831882] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] raise exception.PortBindingFailed(port_id=port['id']) [ 562.832189] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] nova.exception.PortBindingFailed: Binding failed for port 789aa4a4-faf6-4751-a092-d070d022284b, please check neutron logs for more information. [ 562.832189] env[62109]: ERROR nova.compute.manager [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] [ 562.832189] env[62109]: DEBUG nova.compute.utils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Binding failed for port 789aa4a4-faf6-4751-a092-d070d022284b, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 562.832408] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 8.383s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.840892] env[62109]: DEBUG nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Build of instance 53bae73f-2aec-41c4-bd62-aeedbf162258 was re-scheduled: Binding failed for port 789aa4a4-faf6-4751-a092-d070d022284b, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 562.841488] env[62109]: DEBUG nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 562.841793] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquiring lock "refresh_cache-53bae73f-2aec-41c4-bd62-aeedbf162258" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.841848] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquired lock "refresh_cache-53bae73f-2aec-41c4-bd62-aeedbf162258" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.842025] env[62109]: DEBUG nova.network.neutron [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 562.909811] env[62109]: DEBUG nova.network.neutron [-] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.022787] env[62109]: DEBUG nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 563.053265] env[62109]: DEBUG nova.virt.hardware [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 563.053536] env[62109]: DEBUG nova.virt.hardware [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 563.053695] env[62109]: DEBUG nova.virt.hardware [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 563.053874] env[62109]: DEBUG nova.virt.hardware [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 563.054029] env[62109]: DEBUG nova.virt.hardware [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 563.054180] env[62109]: DEBUG nova.virt.hardware [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 563.054386] env[62109]: DEBUG nova.virt.hardware [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 563.054542] env[62109]: DEBUG nova.virt.hardware [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 563.054729] env[62109]: DEBUG nova.virt.hardware [None 
req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 563.054966] env[62109]: DEBUG nova.virt.hardware [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 563.055044] env[62109]: DEBUG nova.virt.hardware [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 563.056643] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc96ee9-70ac-4c0a-b51a-31a7983eb999 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.064469] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5801b4e6-b0cc-43da-b9f1-c699fac7e5ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.124944] env[62109]: DEBUG oslo_concurrency.lockutils [req-f17379bf-813f-412c-809a-6ff4bd0204a6 req-f397d163-123d-49d6-be62-3baa1b954302 service nova] Releasing lock "refresh_cache-892142c4-8d74-4c27-95ed-2edc07def573" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.225201] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Releasing lock "refresh_cache-b52ff4f8-2341-493e-8587-b8d1d12efb7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.225905] env[62109]: DEBUG nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 563.226142] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 563.226466] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-079f563b-3a3a-4c04-9152-fd3b908b92fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.242087] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5e2f50-48ff-4ddb-bbf4-0edc3d6871a7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.255335] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.268539] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b52ff4f8-2341-493e-8587-b8d1d12efb7e could not be found. [ 563.269064] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 563.269064] env[62109]: INFO nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 563.269277] env[62109]: DEBUG oslo.service.loopingcall [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 563.269652] env[62109]: DEBUG nova.compute.manager [-] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 563.269652] env[62109]: DEBUG nova.network.neutron [-] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 563.306802] env[62109]: DEBUG nova.network.neutron [-] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 563.378206] env[62109]: DEBUG nova.network.neutron [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 563.419376] env[62109]: INFO nova.compute.manager [-] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Took 1.04 seconds to deallocate network for instance. [ 563.422235] env[62109]: DEBUG nova.compute.claims [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 563.422235] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.542863] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf512b91-7911-405b-b9ef-240e3adf46b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.553340] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0aadb5-bcf2-46ac-8028-ecd7de1db43b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.588409] env[62109]: DEBUG nova.network.neutron [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.593331] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cbcc11c-8eaa-4a95-b397-1eaf7204f1ca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.606701] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427d85b7-24e1-4d5a-857f-880cda076cfd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.628077] env[62109]: DEBUG nova.compute.provider_tree [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.788937] env[62109]: ERROR nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed 
for port c8e5852b-f51f-4721-bedd-abcab4c157ad, please check neutron logs for more information. [ 563.788937] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 563.788937] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.788937] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 563.788937] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 563.788937] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 563.788937] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 563.788937] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 563.788937] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.788937] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 563.788937] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.788937] env[62109]: ERROR nova.compute.manager raise self.value [ 563.788937] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 563.788937] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 563.788937] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.788937] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 563.789504] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.789504] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 563.789504] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c8e5852b-f51f-4721-bedd-abcab4c157ad, please check neutron logs for more information. 
[ 563.789504] env[62109]: ERROR nova.compute.manager [ 563.789504] env[62109]: Traceback (most recent call last): [ 563.789504] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 563.789504] env[62109]: listener.cb(fileno) [ 563.789504] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 563.789504] env[62109]: result = function(*args, **kwargs) [ 563.789504] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 563.789504] env[62109]: return func(*args, **kwargs) [ 563.789504] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 563.789504] env[62109]: raise e [ 563.789504] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.789504] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 563.789504] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 563.789504] env[62109]: created_port_ids = self._update_ports_for_instance( [ 563.789504] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 563.789504] env[62109]: with excutils.save_and_reraise_exception(): [ 563.789504] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.789504] env[62109]: self.force_reraise() [ 563.789504] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.789504] env[62109]: raise self.value [ 563.789504] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 563.789504] env[62109]: updated_port = self._update_port( [ 563.789504] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.789504] env[62109]: _ensure_no_port_binding_failure(port) [ 563.789504] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.789504] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 563.790250] env[62109]: nova.exception.PortBindingFailed: Binding failed for port c8e5852b-f51f-4721-bedd-abcab4c157ad, please check neutron logs for more information. [ 563.790250] env[62109]: Removing descriptor: 15 [ 563.790250] env[62109]: ERROR nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c8e5852b-f51f-4721-bedd-abcab4c157ad, please check neutron logs for more information. 
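The traceback above ends in nova/network/neutron.py's _ensure_no_port_binding_failure, which raises PortBindingFailed once Neutron reports that the port could not be bound on the target host. A minimal sketch of that kind of check follows, assuming the port dict comes from Neutron's port-update response; the exception class and constant here are local stand-ins for illustration, not Nova's own definitions, and the real function body may differ.

    # Sketch of the binding-failure check named in the traceback
    # (nova.network.neutron._ensure_no_port_binding_failure); the exception
    # class and constant below are local stand-ins for illustration.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # After Nova updates a port with a host binding, Neutron reports the
        # result in 'binding:vif_type'; 'binding_failed' means no mechanism
        # driver could bind the port on that host, so the build must fail.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example: a port Neutron failed to bind, as in the errors logged above.
    try:
        ensure_no_port_binding_failure(
            {'id': 'c8e5852b-f51f-4721-bedd-abcab4c157ad',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)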
[ 563.790250] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Traceback (most recent call last): [ 563.790250] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 563.790250] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] yield resources [ 563.790250] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 563.790250] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] self.driver.spawn(context, instance, image_meta, [ 563.790250] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 563.790250] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] self._vmops.spawn(context, instance, image_meta, injected_files, [ 563.790250] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 563.790250] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] vm_ref = self.build_virtual_machine(instance, [ 563.790615] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 563.790615] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] vif_infos = vmwarevif.get_vif_info(self._session, [ 563.790615] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 563.790615] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] for vif in network_info: [ 563.790615] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 563.790615] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] return self._sync_wrapper(fn, *args, **kwargs) [ 563.790615] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 563.790615] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] self.wait() [ 563.790615] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 563.790615] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] self[:] = self._gt.wait() [ 563.790615] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 563.790615] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] return self._exit_event.wait() [ 563.790615] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 563.790967] env[62109]: ERROR 
nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] result = hub.switch() [ 563.790967] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 563.790967] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] return self.greenlet.switch() [ 563.790967] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 563.790967] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] result = function(*args, **kwargs) [ 563.790967] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 563.790967] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] return func(*args, **kwargs) [ 563.790967] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 563.790967] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] raise e [ 563.790967] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.790967] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] nwinfo = self.network_api.allocate_for_instance( [ 563.790967] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 563.790967] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] created_port_ids = self._update_ports_for_instance( [ 563.791333] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 563.791333] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] with excutils.save_and_reraise_exception(): [ 563.791333] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.791333] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] self.force_reraise() [ 563.791333] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.791333] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] raise self.value [ 563.791333] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 563.791333] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] updated_port = self._update_port( [ 563.791333] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.791333] 
env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] _ensure_no_port_binding_failure(port) [ 563.791333] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.791333] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] raise exception.PortBindingFailed(port_id=port['id']) [ 563.791645] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] nova.exception.PortBindingFailed: Binding failed for port c8e5852b-f51f-4721-bedd-abcab4c157ad, please check neutron logs for more information. [ 563.791645] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] [ 563.791645] env[62109]: INFO nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Terminating instance [ 563.794420] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Acquiring lock "refresh_cache-46bb583c-bc67-4b18-903d-afbbf8248691" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.794592] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Acquired lock "refresh_cache-46bb583c-bc67-4b18-903d-afbbf8248691" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.794756] env[62109]: DEBUG nova.network.neutron [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 563.809198] env[62109]: DEBUG nova.network.neutron [-] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.880587] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Acquiring lock "a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.880587] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Lock "a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.046043] env[62109]: DEBUG oslo_concurrency.lockutils 
[None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Acquiring lock "356e57cb-9e43-47e1-a02b-b81ff737883e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.046421] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Lock "356e57cb-9e43-47e1-a02b-b81ff737883e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.093669] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Releasing lock "refresh_cache-53bae73f-2aec-41c4-bd62-aeedbf162258" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.093938] env[62109]: DEBUG nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 564.094138] env[62109]: DEBUG nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 564.094308] env[62109]: DEBUG nova.network.neutron [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 564.123859] env[62109]: DEBUG nova.network.neutron [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 564.132023] env[62109]: DEBUG nova.scheduler.client.report [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 564.312676] env[62109]: INFO nova.compute.manager [-] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Took 1.04 seconds to deallocate network for instance. [ 564.315031] env[62109]: DEBUG nova.compute.claims [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 564.315208] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.325630] env[62109]: ERROR nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 29546bda-6d6d-4096-ad62-678eeaff25ce, please check neutron logs for more information. 
[ 564.325630] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 564.325630] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 564.325630] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 564.325630] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 564.325630] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 564.325630] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 564.325630] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 564.325630] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.325630] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 564.325630] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.325630] env[62109]: ERROR nova.compute.manager raise self.value [ 564.325630] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 564.325630] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 564.325630] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.325630] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 564.326247] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.326247] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 564.326247] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 29546bda-6d6d-4096-ad62-678eeaff25ce, please check neutron logs for more information. 
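The recurring "Acquiring lock ... / Lock ... acquired ... waited N s / released ... held N s" entries come from oslo.concurrency's lockutils wrappers. A short illustrative sketch of both forms follows; the lock names are taken from this log, and the function bodies are placeholders rather than Nova code.

    # Illustrative use of oslo.concurrency lockutils, which produces log lines
    # like the "Acquiring lock ... / acquired ... waited / released ... held"
    # entries seen throughout this log. Lock names are taken from the log;
    # the function bodies are placeholders.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Runs with the "compute_resources" lock held, serializing resource
        # tracker updates the way the ResourceTracker calls above are.
        pass

    def build_network_info(instance_uuid):
        # Context-manager form, mirroring the "refresh_cache-<uuid>" locks.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass

    abort_instance_claim()
    build_network_info('53bae73f-2aec-41c4-bd62-aeedbf162258')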
[ 564.326247] env[62109]: ERROR nova.compute.manager [ 564.326247] env[62109]: Traceback (most recent call last): [ 564.326247] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 564.326247] env[62109]: listener.cb(fileno) [ 564.326247] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 564.326247] env[62109]: result = function(*args, **kwargs) [ 564.326247] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 564.326247] env[62109]: return func(*args, **kwargs) [ 564.326247] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 564.326247] env[62109]: raise e [ 564.326247] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 564.326247] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 564.326247] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 564.326247] env[62109]: created_port_ids = self._update_ports_for_instance( [ 564.326247] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 564.326247] env[62109]: with excutils.save_and_reraise_exception(): [ 564.326247] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.326247] env[62109]: self.force_reraise() [ 564.326247] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.326247] env[62109]: raise self.value [ 564.326247] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 564.326247] env[62109]: updated_port = self._update_port( [ 564.326247] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.326247] env[62109]: _ensure_no_port_binding_failure(port) [ 564.326247] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.326247] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 564.327024] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 29546bda-6d6d-4096-ad62-678eeaff25ce, please check neutron logs for more information. [ 564.327024] env[62109]: Removing descriptor: 18 [ 564.327024] env[62109]: ERROR nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 29546bda-6d6d-4096-ad62-678eeaff25ce, please check neutron logs for more information. 
[ 564.327024] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Traceback (most recent call last): [ 564.327024] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 564.327024] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] yield resources [ 564.327024] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 564.327024] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] self.driver.spawn(context, instance, image_meta, [ 564.327024] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 564.327024] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 564.327024] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 564.327024] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] vm_ref = self.build_virtual_machine(instance, [ 564.327320] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 564.327320] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] vif_infos = vmwarevif.get_vif_info(self._session, [ 564.327320] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 564.327320] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] for vif in network_info: [ 564.327320] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 564.327320] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] return self._sync_wrapper(fn, *args, **kwargs) [ 564.327320] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 564.327320] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] self.wait() [ 564.327320] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 564.327320] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] self[:] = self._gt.wait() [ 564.327320] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 564.327320] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] return self._exit_event.wait() [ 564.327320] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 564.327643] env[62109]: ERROR 
nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] result = hub.switch() [ 564.327643] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 564.327643] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] return self.greenlet.switch() [ 564.327643] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 564.327643] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] result = function(*args, **kwargs) [ 564.327643] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 564.327643] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] return func(*args, **kwargs) [ 564.327643] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 564.327643] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] raise e [ 564.327643] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 564.327643] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] nwinfo = self.network_api.allocate_for_instance( [ 564.327643] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 564.327643] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] created_port_ids = self._update_ports_for_instance( [ 564.327958] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 564.327958] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] with excutils.save_and_reraise_exception(): [ 564.327958] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.327958] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] self.force_reraise() [ 564.327958] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.327958] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] raise self.value [ 564.327958] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 564.327958] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] updated_port = self._update_port( [ 564.327958] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.327958] 
env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] _ensure_no_port_binding_failure(port) [ 564.327958] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.327958] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] raise exception.PortBindingFailed(port_id=port['id']) [ 564.328266] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] nova.exception.PortBindingFailed: Binding failed for port 29546bda-6d6d-4096-ad62-678eeaff25ce, please check neutron logs for more information. [ 564.328266] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] [ 564.328266] env[62109]: INFO nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Terminating instance [ 564.328705] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Acquiring lock "refresh_cache-6a9e1c1d-8de1-4a40-b32b-e3adf445ece4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.328923] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Acquired lock "refresh_cache-6a9e1c1d-8de1-4a40-b32b-e3adf445ece4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.328994] env[62109]: DEBUG nova.network.neutron [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 564.381039] env[62109]: DEBUG nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 564.404510] env[62109]: DEBUG nova.network.neutron [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 564.584683] env[62109]: DEBUG nova.network.neutron [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.627175] env[62109]: DEBUG nova.network.neutron [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.636729] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.804s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.637392] env[62109]: ERROR nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 805389e0-22e2-4986-a49e-ca570e7c3a80, please check neutron logs for more information. [ 564.637392] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Traceback (most recent call last): [ 564.637392] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 564.637392] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] self.driver.spawn(context, instance, image_meta, [ 564.637392] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 564.637392] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 564.637392] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 564.637392] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] vm_ref = self.build_virtual_machine(instance, [ 564.637392] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 564.637392] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] vif_infos = vmwarevif.get_vif_info(self._session, [ 564.637392] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 564.637725] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] for vif in network_info: [ 564.637725] 
env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 564.637725] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] return self._sync_wrapper(fn, *args, **kwargs) [ 564.637725] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 564.637725] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] self.wait() [ 564.637725] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 564.637725] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] self[:] = self._gt.wait() [ 564.637725] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 564.637725] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] return self._exit_event.wait() [ 564.637725] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 564.637725] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] result = hub.switch() [ 564.637725] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 564.637725] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] return self.greenlet.switch() [ 564.638096] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 564.638096] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] result = function(*args, **kwargs) [ 564.638096] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 564.638096] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] return func(*args, **kwargs) [ 564.638096] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 564.638096] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] raise e [ 564.638096] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 564.638096] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] nwinfo = self.network_api.allocate_for_instance( [ 564.638096] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 564.638096] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] created_port_ids = self._update_ports_for_instance( [ 564.638096] env[62109]: ERROR nova.compute.manager 
[instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 564.638096] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] with excutils.save_and_reraise_exception(): [ 564.638096] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.638437] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] self.force_reraise() [ 564.638437] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.638437] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] raise self.value [ 564.638437] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 564.638437] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] updated_port = self._update_port( [ 564.638437] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.638437] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] _ensure_no_port_binding_failure(port) [ 564.638437] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.638437] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] raise exception.PortBindingFailed(port_id=port['id']) [ 564.638437] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] nova.exception.PortBindingFailed: Binding failed for port 805389e0-22e2-4986-a49e-ca570e7c3a80, please check neutron logs for more information. [ 564.638437] env[62109]: ERROR nova.compute.manager [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] [ 564.638754] env[62109]: DEBUG nova.compute.utils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Binding failed for port 805389e0-22e2-4986-a49e-ca570e7c3a80, please check neutron logs for more information. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 564.639631] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 9.955s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.639810] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.640009] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 564.640291] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.221s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.641920] env[62109]: INFO nova.compute.claims [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 564.649024] env[62109]: DEBUG nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Build of instance 11f4e78d-12c7-4f93-8104-134d337ee6e0 was re-scheduled: Binding failed for port 805389e0-22e2-4986-a49e-ca570e7c3a80, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 564.649024] env[62109]: DEBUG nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 564.649024] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "refresh_cache-11f4e78d-12c7-4f93-8104-134d337ee6e0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.649024] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "refresh_cache-11f4e78d-12c7-4f93-8104-134d337ee6e0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.649569] env[62109]: DEBUG nova.network.neutron [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 564.649569] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d301467-4ed4-438a-9ab6-e112bdd45812 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.657788] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95775a5a-ff5b-47fd-9ecc-0ee82b47c9e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.678474] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a91225-3434-4d2d-90d5-82dacc0b8901 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.685764] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8135dfa-3f7f-4cc7-8395-a981e210ea0c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.721216] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181570MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 564.721216] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.905167] env[62109]: DEBUG nova.network.neutron [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] 
[instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 564.925090] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.090145] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Releasing lock "refresh_cache-46bb583c-bc67-4b18-903d-afbbf8248691" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.090620] env[62109]: DEBUG nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 565.090841] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 565.092178] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79f96ab0-2cda-414a-96a4-9b0c35ca6d5b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.105039] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6a200b-eaf4-471e-911c-6b464ebcb09f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.136494] env[62109]: INFO nova.compute.manager [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] Took 1.04 seconds to deallocate network for instance. [ 565.140015] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 46bb583c-bc67-4b18-903d-afbbf8248691 could not be found. 
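The warning above, together with the "Instance destroyed" and "Took 0.05 seconds to destroy" entries that follow, shows the teardown tolerating a VM that was never actually created on the backend: the InstanceNotFound is downgraded to a warning and cleanup carries on. A minimal sketch of that shape is below; it assumes a Nova development tree for the exception import, and find_vm_ref/destroy_vm are illustrative placeholders for the vCenter calls (SearchIndex.FindAllByUuid, Destroy_Task) seen nearby, not Nova APIs.

```python
import logging

from nova import exception  # assumes the Nova source tree is importable

LOG = logging.getLogger(__name__)


def destroy_instance(session, instance_uuid, find_vm_ref, destroy_vm):
    """Destroy the backend VM, tolerating one that no longer exists."""
    try:
        vm_ref = find_vm_ref(session, instance_uuid)  # e.g. SearchIndex.FindAllByUuid
        destroy_vm(session, vm_ref)                   # e.g. Destroy_Task
    except exception.InstanceNotFound as err:
        # A missing backend VM only rates a warning; cleanup continues so the
        # network deallocation and claim abort that follow can still run.
        LOG.warning('Instance does not exist on backend: %s', err)
    LOG.debug('Instance %s destroyed', instance_uuid)
```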
[ 565.140306] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 565.140582] env[62109]: INFO nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Took 0.05 seconds to destroy the instance on the hypervisor. [ 565.141675] env[62109]: DEBUG oslo.service.loopingcall [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 565.141675] env[62109]: DEBUG nova.compute.manager [-] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 565.141675] env[62109]: DEBUG nova.network.neutron [-] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 565.202131] env[62109]: DEBUG nova.network.neutron [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 565.204671] env[62109]: DEBUG nova.network.neutron [-] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 565.244789] env[62109]: DEBUG nova.network.neutron [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.337564] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "49137502-b0a0-49f8-affa-82f19caf34b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.337781] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "49137502-b0a0-49f8-affa-82f19caf34b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.337957] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "28e71e8c-2c47-4ea8-bd90-33eb064073e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.338143] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "28e71e8c-2c47-4ea8-bd90-33eb064073e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.388263] env[62109]: DEBUG nova.network.neutron [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.466221] env[62109]: DEBUG nova.compute.manager [None req-47929edd-743c-4156-bd64-84d308b3f6c7 tempest-ServerDiagnosticsV248Test-1208550919 tempest-ServerDiagnosticsV248Test-1208550919-project-admin] [instance: 13988400-7b35-4175-9410-84eff918111d] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 565.467501] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c9214f-eee8-41eb-a066-1c6c2b33da36 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.477402] env[62109]: INFO nova.compute.manager [None req-47929edd-743c-4156-bd64-84d308b3f6c7 tempest-ServerDiagnosticsV248Test-1208550919 
tempest-ServerDiagnosticsV248Test-1208550919-project-admin] [instance: 13988400-7b35-4175-9410-84eff918111d] Retrieving diagnostics [ 565.478735] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3567b6-4752-40e3-9e2b-d85ff296ec39 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.708837] env[62109]: DEBUG nova.network.neutron [-] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.748462] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Releasing lock "refresh_cache-6a9e1c1d-8de1-4a40-b32b-e3adf445ece4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.748462] env[62109]: DEBUG nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 565.748462] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 565.751481] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c1fab1fb-6192-411a-a1ab-935423921394 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.763070] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0dd192-5d95-48b5-b073-2385d0353523 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.789573] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4 could not be found. [ 565.789792] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 565.789973] env[62109]: INFO nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Took 0.04 seconds to destroy the instance on the hypervisor. 
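The same tolerant teardown repeats here for 6a9e1c1d, and the entry that follows hands network deallocation to oslo.service's looping-call machinery ("Waiting for function ..._deallocate_network_with_retries to return"). The sketch below illustrates that retry pattern with oslo_service.loopingcall.RetryDecorator; the retry count and sleep values are illustrative, and deallocate_fn stands in for the actual Neutron deallocation call.

```python
import logging

from oslo_service import loopingcall

LOG = logging.getLogger(__name__)


def try_deallocate_network(deallocate_fn, instance_uuid):
    """Call deallocate_fn(instance_uuid), retrying transient failures."""

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=12,
                                exceptions=(ConnectionError,))
    def _deallocate_network_with_retries():
        LOG.debug('Deallocating network for instance %s', instance_uuid)
        deallocate_fn(instance_uuid)

    # RetryDecorator runs the wrapped call inside a looping call and logs
    # "Waiting for function ... to return" while it waits; an exception from
    # the tuple above triggers another attempt after an increasing sleep,
    # up to max_retry_count retries.
    _deallocate_network_with_retries()
```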
[ 565.790232] env[62109]: DEBUG oslo.service.loopingcall [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 565.794548] env[62109]: DEBUG nova.compute.manager [-] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 565.794655] env[62109]: DEBUG nova.network.neutron [-] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 565.815783] env[62109]: DEBUG nova.network.neutron [-] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 565.867054] env[62109]: DEBUG nova.compute.manager [req-4004f902-d839-48ae-82bd-aaf44d401efc req-e9ce2dea-72d0-4af9-80d9-c7800a347e6d service nova] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Received event network-changed-c8e5852b-f51f-4721-bedd-abcab4c157ad {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 565.867291] env[62109]: DEBUG nova.compute.manager [req-4004f902-d839-48ae-82bd-aaf44d401efc req-e9ce2dea-72d0-4af9-80d9-c7800a347e6d service nova] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Refreshing instance network info cache due to event network-changed-c8e5852b-f51f-4721-bedd-abcab4c157ad. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 565.867503] env[62109]: DEBUG oslo_concurrency.lockutils [req-4004f902-d839-48ae-82bd-aaf44d401efc req-e9ce2dea-72d0-4af9-80d9-c7800a347e6d service nova] Acquiring lock "refresh_cache-46bb583c-bc67-4b18-903d-afbbf8248691" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.867642] env[62109]: DEBUG oslo_concurrency.lockutils [req-4004f902-d839-48ae-82bd-aaf44d401efc req-e9ce2dea-72d0-4af9-80d9-c7800a347e6d service nova] Acquired lock "refresh_cache-46bb583c-bc67-4b18-903d-afbbf8248691" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.867792] env[62109]: DEBUG nova.network.neutron [req-4004f902-d839-48ae-82bd-aaf44d401efc req-e9ce2dea-72d0-4af9-80d9-c7800a347e6d service nova] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Refreshing network info cache for port c8e5852b-f51f-4721-bedd-abcab4c157ad {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 565.890801] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "refresh_cache-11f4e78d-12c7-4f93-8104-134d337ee6e0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.891045] env[62109]: DEBUG nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 565.894031] env[62109]: DEBUG nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 565.894031] env[62109]: DEBUG nova.network.neutron [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 565.914419] env[62109]: DEBUG nova.network.neutron [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 565.932160] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9a9790-165b-42cd-a438-5a7ddc3eb305 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.941804] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f608720-64d6-44b6-b2f9-86ef12705bcf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.976226] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3aae345-23b2-4961-8f94-8ce1da3a9e6a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.983916] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc57db0-e867-4b88-b0ca-be94e020479b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.999372] env[62109]: DEBUG nova.compute.provider_tree [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.024062] env[62109]: DEBUG nova.compute.manager [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Received event network-vif-deleted-a9554fbe-668e-4592-95ed-1228e9eabbe7 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 566.024180] env[62109]: DEBUG nova.compute.manager [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Received event network-changed-6eb0afc7-671f-4f09-86b2-4de081c5656d {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 566.024383] env[62109]: DEBUG nova.compute.manager [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] 
Refreshing instance network info cache due to event network-changed-6eb0afc7-671f-4f09-86b2-4de081c5656d. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 566.024531] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] Acquiring lock "refresh_cache-a10c8e2e-9b5c-498e-81dc-ca69af0ff123" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.025213] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] Acquired lock "refresh_cache-a10c8e2e-9b5c-498e-81dc-ca69af0ff123" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.025591] env[62109]: DEBUG nova.network.neutron [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Refreshing network info cache for port 6eb0afc7-671f-4f09-86b2-4de081c5656d {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 566.173283] env[62109]: INFO nova.scheduler.client.report [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Deleted allocations for instance 53bae73f-2aec-41c4-bd62-aeedbf162258 [ 566.213914] env[62109]: INFO nova.compute.manager [-] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Took 1.07 seconds to deallocate network for instance. [ 566.216086] env[62109]: DEBUG nova.compute.claims [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 566.216271] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.318520] env[62109]: DEBUG nova.network.neutron [-] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.413284] env[62109]: DEBUG nova.network.neutron [req-4004f902-d839-48ae-82bd-aaf44d401efc req-e9ce2dea-72d0-4af9-80d9-c7800a347e6d service nova] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 566.420594] env[62109]: DEBUG nova.network.neutron [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.469160] env[62109]: DEBUG nova.network.neutron [req-4004f902-d839-48ae-82bd-aaf44d401efc req-e9ce2dea-72d0-4af9-80d9-c7800a347e6d service nova] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.498695] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Acquiring lock "58f76ca2-8f1b-4d9f-887b-1527ba70e91c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.498999] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Lock "58f76ca2-8f1b-4d9f-887b-1527ba70e91c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.502384] env[62109]: DEBUG nova.scheduler.client.report [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 566.546705] env[62109]: DEBUG nova.network.neutron [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 566.610724] env[62109]: DEBUG nova.network.neutron [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.681646] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5e622fe-7469-4d6c-9a00-933d020e92c6 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Lock "53bae73f-2aec-41c4-bd62-aeedbf162258" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.113s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.685886] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "53bae73f-2aec-41c4-bd62-aeedbf162258" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 19.042s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.685886] env[62109]: INFO nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 53bae73f-2aec-41c4-bd62-aeedbf162258] During sync_power_state the instance has a pending task (spawning). Skip. [ 566.685886] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "53bae73f-2aec-41c4-bd62-aeedbf162258" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.003s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.823649] env[62109]: INFO nova.compute.manager [-] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Took 1.03 seconds to deallocate network for instance. [ 566.828729] env[62109]: DEBUG nova.compute.claims [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 566.828922] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.924276] env[62109]: INFO nova.compute.manager [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] Took 1.03 seconds to deallocate network for instance. 
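Both failed builds are now unwound through the resource tracker: the claim taken when the instance was scheduled is aborted under the "compute_resources" lock, which is why the surrounding entries alternate between instance_claim, abort_instance_claim and clean_compute_node_cache acquiring and releasing the same lock name. A minimal sketch of that pattern with oslo_concurrency.lockutils follows; SimpleResourceTracker is illustrative, not nova.compute.resource_tracker.

```python
import logging

from oslo_concurrency import lockutils

LOG = logging.getLogger(__name__)


class SimpleResourceTracker(object):
    """Toy tracker that serializes claim/abort on one named lock."""

    def __init__(self, free_vcpus, free_ram_mb):
        self.free_vcpus = free_vcpus
        self.free_ram_mb = free_ram_mb

    @lockutils.synchronized('compute_resources')
    def instance_claim(self, instance_uuid, vcpus, ram_mb):
        if vcpus > self.free_vcpus or ram_mb > self.free_ram_mb:
            raise RuntimeError('insufficient resources')  # stand-in exception
        self.free_vcpus -= vcpus
        self.free_ram_mb -= ram_mb
        LOG.info('Claim successful for %s', instance_uuid)
        return (vcpus, ram_mb)

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(self, instance_uuid, claim):
        # Mirrors the "Aborting claim" entries: give back the resources that
        # were set aside for a build that failed (e.g. PortBindingFailed).
        vcpus, ram_mb = claim
        self.free_vcpus += vcpus
        self.free_ram_mb += ram_mb
        LOG.debug('Aborted claim for %s', instance_uuid)
```

With debug logging enabled, lockutils wraps each decorated call and emits the 'Acquiring lock ... by ...' / 'Lock ... acquired by ... :: waited' / '"released" ... :: held' bookkeeping seen throughout this run.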
[ 566.973262] env[62109]: DEBUG oslo_concurrency.lockutils [req-4004f902-d839-48ae-82bd-aaf44d401efc req-e9ce2dea-72d0-4af9-80d9-c7800a347e6d service nova] Releasing lock "refresh_cache-46bb583c-bc67-4b18-903d-afbbf8248691" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.013269] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.013269] env[62109]: DEBUG nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 567.015220] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 4.355s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.115546] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] Releasing lock "refresh_cache-a10c8e2e-9b5c-498e-81dc-ca69af0ff123" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.115546] env[62109]: DEBUG nova.compute.manager [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Received event network-vif-deleted-6eb0afc7-671f-4f09-86b2-4de081c5656d {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 567.115546] env[62109]: DEBUG nova.compute.manager [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Received event network-changed-186a3613-a077-4a81-8822-8c8b454bd666 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 567.115546] env[62109]: DEBUG nova.compute.manager [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Refreshing instance network info cache due to event network-changed-186a3613-a077-4a81-8822-8c8b454bd666. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 567.115546] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] Acquiring lock "refresh_cache-b52ff4f8-2341-493e-8587-b8d1d12efb7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.115776] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] Acquired lock "refresh_cache-b52ff4f8-2341-493e-8587-b8d1d12efb7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.115776] env[62109]: DEBUG nova.network.neutron [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Refreshing network info cache for port 186a3613-a077-4a81-8822-8c8b454bd666 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 567.187934] env[62109]: DEBUG nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 567.483751] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Acquiring lock "21efb09c-8d90-415c-815a-af6ce6707c97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.483966] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Lock "21efb09c-8d90-415c-815a-af6ce6707c97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.523222] env[62109]: DEBUG nova.compute.utils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 567.524623] env[62109]: DEBUG nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 567.524888] env[62109]: DEBUG nova.network.neutron [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 567.613922] env[62109]: DEBUG nova.policy [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd3d199881514997a123eb6c62f4fede', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '523e7f9fa3dd4b54afd8c52200a00f84', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 567.652044] env[62109]: DEBUG nova.network.neutron [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 567.718361] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.771439] env[62109]: DEBUG nova.network.neutron [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.802979] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a90bbe4-f039-4fa1-a591-77f13ae181f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.812726] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b47f5c9-656a-4a39-a625-e36a515e2384 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.853789] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b625a8-565a-4691-81c0-246bf312d1b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.864177] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cde07f8-4866-430f-bf2d-3b0328850a4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.881723] env[62109]: DEBUG nova.compute.provider_tree [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 
tempest-ImagesNegativeTestJSON-1853609162-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 567.948146] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "a6d094c3-8488-4437-8972-aa246809a5b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.948146] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "a6d094c3-8488-4437-8972-aa246809a5b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.973295] env[62109]: INFO nova.scheduler.client.report [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleted allocations for instance 11f4e78d-12c7-4f93-8104-134d337ee6e0 [ 568.033583] env[62109]: DEBUG nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 568.275097] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] Releasing lock "refresh_cache-b52ff4f8-2341-493e-8587-b8d1d12efb7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.275097] env[62109]: DEBUG nova.compute.manager [req-0a11b5c7-6b3a-4d81-bcc4-3f910969d531 req-01e57aee-c04b-49dd-9a55-3c100d492026 service nova] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Received event network-vif-deleted-186a3613-a077-4a81-8822-8c8b454bd666 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 568.370658] env[62109]: DEBUG nova.network.neutron [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Successfully created port: 8a4d4c47-2ce5-46f4-b366-81723ff941c8 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 568.387280] env[62109]: DEBUG nova.scheduler.client.report [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 568.487739] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d5205e6-b351-49fc-ba13-58777b85660d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "11f4e78d-12c7-4f93-8104-134d337ee6e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.619s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.489766] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "11f4e78d-12c7-4f93-8104-134d337ee6e0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 20.849s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.489962] env[62109]: INFO nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 11f4e78d-12c7-4f93-8104-134d337ee6e0] During sync_power_state the instance has a pending task (networking). Skip. [ 568.490158] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "11f4e78d-12c7-4f93-8104-134d337ee6e0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.675351] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquiring lock "693e6fa3-4d5f-47aa-8543-32f21001b78f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.675589] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Lock "693e6fa3-4d5f-47aa-8543-32f21001b78f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.895961] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.881s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.896658] env[62109]: ERROR nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a9554fbe-668e-4592-95ed-1228e9eabbe7, please check neutron logs for more information. 
[ 568.896658] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Traceback (most recent call last): [ 568.896658] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 568.896658] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] self.driver.spawn(context, instance, image_meta, [ 568.896658] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 568.896658] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] self._vmops.spawn(context, instance, image_meta, injected_files, [ 568.896658] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 568.896658] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] vm_ref = self.build_virtual_machine(instance, [ 568.896658] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 568.896658] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] vif_infos = vmwarevif.get_vif_info(self._session, [ 568.896658] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 568.897020] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] for vif in network_info: [ 568.897020] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 568.897020] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] return self._sync_wrapper(fn, *args, **kwargs) [ 568.897020] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 568.897020] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] self.wait() [ 568.897020] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 568.897020] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] self[:] = self._gt.wait() [ 568.897020] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 568.897020] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] return self._exit_event.wait() [ 568.897020] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 568.897020] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] result = hub.switch() [ 568.897020] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
568.897020] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] return self.greenlet.switch() [ 568.897370] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 568.897370] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] result = function(*args, **kwargs) [ 568.897370] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 568.897370] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] return func(*args, **kwargs) [ 568.897370] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 568.897370] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] raise e [ 568.897370] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 568.897370] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] nwinfo = self.network_api.allocate_for_instance( [ 568.897370] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 568.897370] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] created_port_ids = self._update_ports_for_instance( [ 568.897370] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 568.897370] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] with excutils.save_and_reraise_exception(): [ 568.897370] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 568.897718] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] self.force_reraise() [ 568.897718] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 568.897718] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] raise self.value [ 568.897718] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 568.897718] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] updated_port = self._update_port( [ 568.897718] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 568.897718] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] _ensure_no_port_binding_failure(port) [ 568.897718] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 568.897718] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] raise exception.PortBindingFailed(port_id=port['id']) [ 568.897718] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] nova.exception.PortBindingFailed: Binding failed for port a9554fbe-668e-4592-95ed-1228e9eabbe7, please check neutron logs for more information. [ 568.897718] env[62109]: ERROR nova.compute.manager [instance: 892142c4-8d74-4c27-95ed-2edc07def573] [ 568.898040] env[62109]: DEBUG nova.compute.utils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Binding failed for port a9554fbe-668e-4592-95ed-1228e9eabbe7, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 568.899623] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.644s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.901069] env[62109]: INFO nova.compute.claims [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 568.905045] env[62109]: DEBUG nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Build of instance 892142c4-8d74-4c27-95ed-2edc07def573 was re-scheduled: Binding failed for port a9554fbe-668e-4592-95ed-1228e9eabbe7, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 568.905515] env[62109]: DEBUG nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 568.907361] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Acquiring lock "refresh_cache-892142c4-8d74-4c27-95ed-2edc07def573" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.907361] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Acquired lock "refresh_cache-892142c4-8d74-4c27-95ed-2edc07def573" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.907361] env[62109]: DEBUG nova.network.neutron [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 568.909310] env[62109]: DEBUG nova.compute.manager [req-7d105a3a-7ac0-43ba-9506-22b362e3b12a req-f9e48c92-4d66-4454-b413-f575eb82698b service nova] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Received event network-vif-deleted-c8e5852b-f51f-4721-bedd-abcab4c157ad {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 568.992395] env[62109]: DEBUG nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 569.028705] env[62109]: DEBUG nova.compute.manager [req-b6b90bc4-aff8-4e2b-a341-68e3af76e811 req-9fe0b03f-5114-40b5-ad66-03cc2f5b768b service nova] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Received event network-changed-29546bda-6d6d-4096-ad62-678eeaff25ce {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 569.028815] env[62109]: DEBUG nova.compute.manager [req-b6b90bc4-aff8-4e2b-a341-68e3af76e811 req-9fe0b03f-5114-40b5-ad66-03cc2f5b768b service nova] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Refreshing instance network info cache due to event network-changed-29546bda-6d6d-4096-ad62-678eeaff25ce. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 569.029157] env[62109]: DEBUG oslo_concurrency.lockutils [req-b6b90bc4-aff8-4e2b-a341-68e3af76e811 req-9fe0b03f-5114-40b5-ad66-03cc2f5b768b service nova] Acquiring lock "refresh_cache-6a9e1c1d-8de1-4a40-b32b-e3adf445ece4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.029223] env[62109]: DEBUG oslo_concurrency.lockutils [req-b6b90bc4-aff8-4e2b-a341-68e3af76e811 req-9fe0b03f-5114-40b5-ad66-03cc2f5b768b service nova] Acquired lock "refresh_cache-6a9e1c1d-8de1-4a40-b32b-e3adf445ece4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.029345] env[62109]: DEBUG nova.network.neutron [req-b6b90bc4-aff8-4e2b-a341-68e3af76e811 req-9fe0b03f-5114-40b5-ad66-03cc2f5b768b service nova] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Refreshing network info cache for port 29546bda-6d6d-4096-ad62-678eeaff25ce {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 569.043770] env[62109]: DEBUG nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 569.075374] env[62109]: DEBUG nova.virt.hardware [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 569.075572] env[62109]: DEBUG nova.virt.hardware [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 569.075732] env[62109]: DEBUG nova.virt.hardware [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 569.075910] env[62109]: DEBUG nova.virt.hardware [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 569.076643] env[62109]: DEBUG 
nova.virt.hardware [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 569.076851] env[62109]: DEBUG nova.virt.hardware [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 569.077085] env[62109]: DEBUG nova.virt.hardware [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 569.077248] env[62109]: DEBUG nova.virt.hardware [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 569.077425] env[62109]: DEBUG nova.virt.hardware [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 569.077592] env[62109]: DEBUG nova.virt.hardware [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 569.077765] env[62109]: DEBUG nova.virt.hardware [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 569.079283] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41caba4-3f44-4355-baf8-6bca8b3d4838 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.089916] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09482b24-6131-4d6f-b62b-011a3ca26a3e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.449908] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "b60d334c-0834-4267-bb31-1f3c679a2e1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.450293] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "b60d334c-0834-4267-bb31-1f3c679a2e1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.458488] env[62109]: DEBUG nova.network.neutron [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 569.519705] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.549250] env[62109]: DEBUG nova.network.neutron [req-b6b90bc4-aff8-4e2b-a341-68e3af76e811 req-9fe0b03f-5114-40b5-ad66-03cc2f5b768b service nova] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 569.639624] env[62109]: DEBUG nova.network.neutron [req-b6b90bc4-aff8-4e2b-a341-68e3af76e811 req-9fe0b03f-5114-40b5-ad66-03cc2f5b768b service nova] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.918242] env[62109]: ERROR nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8a4d4c47-2ce5-46f4-b366-81723ff941c8, please check neutron logs for more information. 
[ 569.918242] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 569.918242] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 569.918242] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 569.918242] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 569.918242] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 569.918242] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 569.918242] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 569.918242] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 569.918242] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 569.918242] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 569.918242] env[62109]: ERROR nova.compute.manager raise self.value [ 569.918242] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 569.918242] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 569.918242] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 569.918242] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 569.918772] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 569.918772] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 569.918772] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8a4d4c47-2ce5-46f4-b366-81723ff941c8, please check neutron logs for more information. 
[ 569.918772] env[62109]: ERROR nova.compute.manager [ 569.918912] env[62109]: Traceback (most recent call last): [ 569.918951] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 569.918951] env[62109]: listener.cb(fileno) [ 569.918951] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 569.918951] env[62109]: result = function(*args, **kwargs) [ 569.918951] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 569.918951] env[62109]: return func(*args, **kwargs) [ 569.918951] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 569.918951] env[62109]: raise e [ 569.918951] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 569.918951] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 569.918951] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 569.918951] env[62109]: created_port_ids = self._update_ports_for_instance( [ 569.918951] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 569.918951] env[62109]: with excutils.save_and_reraise_exception(): [ 569.918951] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 569.918951] env[62109]: self.force_reraise() [ 569.919389] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 569.919389] env[62109]: raise self.value [ 569.919389] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 569.919389] env[62109]: updated_port = self._update_port( [ 569.919389] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 569.919389] env[62109]: _ensure_no_port_binding_failure(port) [ 569.919389] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 569.919389] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 569.919389] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 8a4d4c47-2ce5-46f4-b366-81723ff941c8, please check neutron logs for more information. [ 569.919389] env[62109]: Removing descriptor: 16 [ 569.927333] env[62109]: ERROR nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8a4d4c47-2ce5-46f4-b366-81723ff941c8, please check neutron logs for more information. 
[ 569.927333] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Traceback (most recent call last): [ 569.927333] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 569.927333] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] yield resources [ 569.927333] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 569.927333] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] self.driver.spawn(context, instance, image_meta, [ 569.927333] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 569.927333] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 569.927333] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 569.927333] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] vm_ref = self.build_virtual_machine(instance, [ 569.927333] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 569.928062] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] vif_infos = vmwarevif.get_vif_info(self._session, [ 569.928062] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 569.928062] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] for vif in network_info: [ 569.928062] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 569.928062] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] return self._sync_wrapper(fn, *args, **kwargs) [ 569.928062] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 569.928062] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] self.wait() [ 569.928062] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 569.928062] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] self[:] = self._gt.wait() [ 569.928062] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 569.928062] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] return self._exit_event.wait() [ 569.928062] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 569.928062] env[62109]: ERROR 
nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] result = hub.switch() [ 569.928440] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 569.928440] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] return self.greenlet.switch() [ 569.928440] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 569.928440] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] result = function(*args, **kwargs) [ 569.928440] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 569.928440] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] return func(*args, **kwargs) [ 569.928440] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 569.928440] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] raise e [ 569.928440] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 569.928440] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] nwinfo = self.network_api.allocate_for_instance( [ 569.928440] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 569.928440] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] created_port_ids = self._update_ports_for_instance( [ 569.928440] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 569.929078] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] with excutils.save_and_reraise_exception(): [ 569.929078] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 569.929078] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] self.force_reraise() [ 569.929078] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 569.929078] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] raise self.value [ 569.929078] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 569.929078] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] updated_port = self._update_port( [ 569.929078] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 569.929078] 
env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] _ensure_no_port_binding_failure(port) [ 569.929078] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 569.929078] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] raise exception.PortBindingFailed(port_id=port['id']) [ 569.929078] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] nova.exception.PortBindingFailed: Binding failed for port 8a4d4c47-2ce5-46f4-b366-81723ff941c8, please check neutron logs for more information. [ 569.929078] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] [ 569.929490] env[62109]: INFO nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Terminating instance [ 569.929490] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Acquiring lock "refresh_cache-41eeecaa-8514-4550-989e-43db00dff6bb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.929490] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Acquired lock "refresh_cache-41eeecaa-8514-4550-989e-43db00dff6bb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.929490] env[62109]: DEBUG nova.network.neutron [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 569.977586] env[62109]: DEBUG nova.network.neutron [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.144017] env[62109]: DEBUG oslo_concurrency.lockutils [req-b6b90bc4-aff8-4e2b-a341-68e3af76e811 req-9fe0b03f-5114-40b5-ad66-03cc2f5b768b service nova] Releasing lock "refresh_cache-6a9e1c1d-8de1-4a40-b32b-e3adf445ece4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.146664] env[62109]: DEBUG nova.compute.manager [req-b6b90bc4-aff8-4e2b-a341-68e3af76e811 req-9fe0b03f-5114-40b5-ad66-03cc2f5b768b service nova] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Received event network-vif-deleted-29546bda-6d6d-4096-ad62-678eeaff25ce {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 570.257490] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f37a1f-0b0a-479c-b851-f46f9ce57d37 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.269568] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7de1f6-ffcd-40c6-971b-4db634270f75 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.313145] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6634d46a-25df-4afc-a2cf-0a646424b7e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.323651] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec82bb3-53c7-48ae-919d-c2d1805db1ad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.340505] env[62109]: DEBUG nova.compute.provider_tree [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 570.483939] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Releasing lock "refresh_cache-892142c4-8d74-4c27-95ed-2edc07def573" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.483939] env[62109]: DEBUG nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 570.483939] env[62109]: DEBUG nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 570.483939] env[62109]: DEBUG nova.network.neutron [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 570.487048] env[62109]: DEBUG nova.network.neutron [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 570.501956] env[62109]: DEBUG nova.network.neutron [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 570.799602] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquiring lock "a05a3519-0395-4e49-b655-a6c6d7bd85a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.799889] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Lock "a05a3519-0395-4e49-b655-a6c6d7bd85a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.841741] env[62109]: DEBUG nova.scheduler.client.report [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 570.852879] env[62109]: DEBUG nova.network.neutron [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.004970] env[62109]: DEBUG nova.network.neutron [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.348858] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.449s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.349416] env[62109]: DEBUG nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 571.352019] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 7.930s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.356054] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Releasing lock "refresh_cache-41eeecaa-8514-4550-989e-43db00dff6bb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.356415] env[62109]: DEBUG nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 571.356599] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 571.356866] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-989bda64-f0cb-4a7e-bd9e-3ae283ae590a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.366706] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8a1012-26e6-4afd-ba76-6d31fa11d67a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.396059] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 41eeecaa-8514-4550-989e-43db00dff6bb could not be found. [ 571.396314] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 571.396496] env[62109]: INFO nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Took 0.04 seconds to destroy the instance on the hypervisor. 
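The destroy path in the entries above is worth a note: the backend lookup (SearchIndex.FindAllByUuid) finds no VM for instance 41eeecaa-8514-4550-989e-43db00dff6bb, vmops logs "Instance does not exist on backend" as a warning, and the instance is still reported destroyed so teardown can continue into network deallocation. A minimal, self-contained sketch of that tolerate-missing pattern follows; InstanceNotFound, find_vm_by_uuid and the dict-backed index are hypothetical stand-ins for illustration, not the actual vmops code.

class InstanceNotFound(Exception):
    """Raised when no backend VM matches the instance UUID (hypothetical)."""


def find_vm_by_uuid(backend_index, instance_uuid):
    # Stand-in for a SearchIndex.FindAllByUuid-style lookup (hypothetical):
    # backend_index maps instance UUID -> list of VM references.
    matches = backend_index.get(instance_uuid, [])
    if not matches:
        raise InstanceNotFound(instance_uuid)
    return matches[0]


def destroy_instance(backend_index, instance_uuid):
    """Destroy the backend VM, treating 'already gone' as success."""
    try:
        vm_ref = find_vm_by_uuid(backend_index, instance_uuid)
    except InstanceNotFound:
        # Mirrors the WARNING above: nothing exists on the backend, so the
        # instance is considered destroyed and cleanup proceeds anyway.
        print(f"Instance {instance_uuid} does not exist on backend; "
              f"treating as destroyed")
        return
    backend_index.pop(instance_uuid, None)
    print(f"Destroyed backend VM {vm_ref} for instance {instance_uuid}")

The point of the pattern is that a missing VM is not an error for a destroy request: the desired end state (no VM) already holds, so the warning is logged and the flow continues, which is exactly what the 0.04-second destroy above reflects.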
[ 571.396748] env[62109]: DEBUG oslo.service.loopingcall [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 571.398016] env[62109]: DEBUG nova.compute.manager [-] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 571.398124] env[62109]: DEBUG nova.network.neutron [-] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 571.444755] env[62109]: DEBUG nova.network.neutron [-] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 571.507649] env[62109]: INFO nova.compute.manager [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] [instance: 892142c4-8d74-4c27-95ed-2edc07def573] Took 1.02 seconds to deallocate network for instance. [ 571.856653] env[62109]: DEBUG nova.compute.utils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 571.861298] env[62109]: DEBUG nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 571.861700] env[62109]: DEBUG nova.network.neutron [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 571.947889] env[62109]: DEBUG nova.network.neutron [-] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.033552] env[62109]: DEBUG nova.policy [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84fa2473b1534ccda21663bca1ba184e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '740d7d319e8c40ef87b927e84dbd9fa4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 572.144653] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afda4d4-34c3-41c3-85f6-b7bb5febeda0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.157878] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b100ebe7-7442-43cf-92ed-2617aa9d2ec1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.196859] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e90cec-ad7f-46b6-903f-740d4ec40a87 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.207097] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef6f2b0-fdd6-4b22-87fe-5324c898a3b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.224322] env[62109]: DEBUG nova.compute.provider_tree [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 572.361933] env[62109]: DEBUG nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Start building block device 
mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 572.451596] env[62109]: INFO nova.compute.manager [-] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Took 1.05 seconds to deallocate network for instance. [ 572.453998] env[62109]: DEBUG nova.compute.claims [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 572.454222] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.541034] env[62109]: INFO nova.scheduler.client.report [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Deleted allocations for instance 892142c4-8d74-4c27-95ed-2edc07def573 [ 572.747466] env[62109]: DEBUG nova.network.neutron [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Successfully created port: 06c37800-4b43-44ae-b1b0-e1b65c1b680b {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 572.753119] env[62109]: ERROR nova.scheduler.client.report [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [req-c5c75e66-5fc2-4bb0-8f53-d792bde83f65] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c5c75e66-5fc2-4bb0-8f53-d792bde83f65"}]}: nova.exception.PortBindingFailed: Binding failed for port 6eb0afc7-671f-4f09-86b2-4de081c5656d, please check neutron logs for more information. 
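The 409 above (code placement.concurrent_update, "resource provider generation conflict") means the provider's generation advanced between the report client's last read and its inventory PUT; the entries that follow show the client reacting by refreshing the provider's inventories and retrying the update. A minimal retry sketch of that read-refresh-retry loop is below; GenerationConflict, get_inventories and put_inventories are hypothetical names used for illustration, not the actual report-client API.

class GenerationConflict(Exception):
    """Stand-in for a placement.concurrent_update 409 response (hypothetical)."""


def set_inventory_with_retry(client, provider_uuid, new_inventory, attempts=3):
    """PUT inventory, refreshing the provider generation on 409 conflicts."""
    for _ in range(attempts):
        # Re-read the current inventory to learn the provider generation
        # (hypothetical client methods; Placement returns a
        # resource_provider_generation alongside the inventory data).
        current = client.get_inventories(provider_uuid)
        generation = current["resource_provider_generation"]
        try:
            return client.put_inventories(provider_uuid, new_inventory,
                                          generation=generation)
        except GenerationConflict:
            # Another writer bumped the generation first; loop to refresh
            # the generation and try the PUT again.
            continue
    raise RuntimeError(
        f"could not update inventory for {provider_uuid} "
        f"after {attempts} attempts")

Under this scheme a conflict is expected and benign: it simply signals that the locally cached generation is stale, which is why the log shows a refresh of inventories, aggregates and traits rather than a failure of the build itself.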
[ 572.783830] env[62109]: DEBUG nova.scheduler.client.report [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 572.803856] env[62109]: DEBUG nova.scheduler.client.report [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 572.804097] env[62109]: DEBUG nova.compute.provider_tree [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 572.817568] env[62109]: DEBUG nova.scheduler.client.report [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 572.845036] env[62109]: DEBUG nova.scheduler.client.report [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 573.051237] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7e340211-d5ca-4da4-a03a-a2e242cd3116 tempest-ImagesNegativeTestJSON-1853609162 tempest-ImagesNegativeTestJSON-1853609162-project-member] Lock "892142c4-8d74-4c27-95ed-2edc07def573" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.386s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.124521] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f13ae6f-4ee1-4dae-93f3-3fb3a6ff2851 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.135141] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3567e6a1-818a-4abf-9d2d-1f7672e7d840 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.170946] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01b0d42-9c11-4cb6-b966-32b5a4257c36 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.179570] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44247e7d-22fb-445d-be9a-d853bc132ed5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.197677] env[62109]: DEBUG nova.compute.provider_tree [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 573.214278] env[62109]: DEBUG nova.compute.manager [req-063b322f-50f1-42bb-9ec4-6b9d05c7c228 req-0e294db4-018b-4aa5-bc8d-b6aecd09e4e6 service nova] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Received event network-changed-8a4d4c47-2ce5-46f4-b366-81723ff941c8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 573.214481] env[62109]: DEBUG nova.compute.manager [req-063b322f-50f1-42bb-9ec4-6b9d05c7c228 req-0e294db4-018b-4aa5-bc8d-b6aecd09e4e6 service nova] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Refreshing instance network info cache due to event network-changed-8a4d4c47-2ce5-46f4-b366-81723ff941c8. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 573.214697] env[62109]: DEBUG oslo_concurrency.lockutils [req-063b322f-50f1-42bb-9ec4-6b9d05c7c228 req-0e294db4-018b-4aa5-bc8d-b6aecd09e4e6 service nova] Acquiring lock "refresh_cache-41eeecaa-8514-4550-989e-43db00dff6bb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.215046] env[62109]: DEBUG oslo_concurrency.lockutils [req-063b322f-50f1-42bb-9ec4-6b9d05c7c228 req-0e294db4-018b-4aa5-bc8d-b6aecd09e4e6 service nova] Acquired lock "refresh_cache-41eeecaa-8514-4550-989e-43db00dff6bb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.215046] env[62109]: DEBUG nova.network.neutron [req-063b322f-50f1-42bb-9ec4-6b9d05c7c228 req-0e294db4-018b-4aa5-bc8d-b6aecd09e4e6 service nova] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Refreshing network info cache for port 8a4d4c47-2ce5-46f4-b366-81723ff941c8 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 573.377015] env[62109]: DEBUG nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 573.416543] env[62109]: DEBUG nova.virt.hardware [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 573.417050] env[62109]: DEBUG nova.virt.hardware [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 573.417339] env[62109]: DEBUG nova.virt.hardware [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 573.417713] env[62109]: DEBUG nova.virt.hardware [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 573.418028] env[62109]: DEBUG nova.virt.hardware [None 
req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 573.418580] env[62109]: DEBUG nova.virt.hardware [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 573.418923] env[62109]: DEBUG nova.virt.hardware [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 573.419259] env[62109]: DEBUG nova.virt.hardware [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 573.419578] env[62109]: DEBUG nova.virt.hardware [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 573.419865] env[62109]: DEBUG nova.virt.hardware [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 573.420184] env[62109]: DEBUG nova.virt.hardware [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 573.421144] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcbb03ff-4d3f-4ece-96eb-3d39bc77f44c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.433439] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10dbd90f-e033-4af7-a3a3-8f2d224946ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.554458] env[62109]: DEBUG nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 573.741016] env[62109]: DEBUG nova.network.neutron [req-063b322f-50f1-42bb-9ec4-6b9d05c7c228 req-0e294db4-018b-4aa5-bc8d-b6aecd09e4e6 service nova] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 573.745137] env[62109]: DEBUG nova.scheduler.client.report [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 24 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 573.745394] env[62109]: DEBUG nova.compute.provider_tree [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 24 to 25 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 573.745568] env[62109]: DEBUG nova.compute.provider_tree [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 573.839641] env[62109]: DEBUG nova.network.neutron [req-063b322f-50f1-42bb-9ec4-6b9d05c7c228 req-0e294db4-018b-4aa5-bc8d-b6aecd09e4e6 service nova] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.082693] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.252743] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.901s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.254554] env[62109]: ERROR nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Failed to build and run instance: nova.exception.PortBindingFailed: 
Binding failed for port 6eb0afc7-671f-4f09-86b2-4de081c5656d, please check neutron logs for more information. [ 574.254554] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Traceback (most recent call last): [ 574.254554] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 574.254554] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] self.driver.spawn(context, instance, image_meta, [ 574.254554] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 574.254554] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] self._vmops.spawn(context, instance, image_meta, injected_files, [ 574.254554] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 574.254554] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] vm_ref = self.build_virtual_machine(instance, [ 574.254554] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 574.254554] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] vif_infos = vmwarevif.get_vif_info(self._session, [ 574.254554] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 574.254914] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] for vif in network_info: [ 574.254914] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 574.254914] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] return self._sync_wrapper(fn, *args, **kwargs) [ 574.254914] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 574.254914] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] self.wait() [ 574.254914] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 574.254914] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] self[:] = self._gt.wait() [ 574.254914] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 574.254914] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] return self._exit_event.wait() [ 574.254914] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 574.254914] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] result = hub.switch() [ 574.254914] env[62109]: ERROR nova.compute.manager [instance: 
a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 574.254914] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] return self.greenlet.switch() [ 574.255293] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.255293] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] result = function(*args, **kwargs) [ 574.255293] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 574.255293] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] return func(*args, **kwargs) [ 574.255293] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 574.255293] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] raise e [ 574.255293] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.255293] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] nwinfo = self.network_api.allocate_for_instance( [ 574.255293] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 574.255293] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] created_port_ids = self._update_ports_for_instance( [ 574.255293] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 574.255293] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] with excutils.save_and_reraise_exception(): [ 574.255293] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.255679] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] self.force_reraise() [ 574.255679] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.255679] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] raise self.value [ 574.255679] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 574.255679] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] updated_port = self._update_port( [ 574.255679] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.255679] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] _ensure_no_port_binding_failure(port) [ 574.255679] env[62109]: ERROR 
nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.255679] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] raise exception.PortBindingFailed(port_id=port['id']) [ 574.255679] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] nova.exception.PortBindingFailed: Binding failed for port 6eb0afc7-671f-4f09-86b2-4de081c5656d, please check neutron logs for more information. [ 574.255679] env[62109]: ERROR nova.compute.manager [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] [ 574.256901] env[62109]: DEBUG nova.compute.utils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Binding failed for port 6eb0afc7-671f-4f09-86b2-4de081c5656d, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 574.256901] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.942s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.259739] env[62109]: DEBUG nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Build of instance a10c8e2e-9b5c-498e-81dc-ca69af0ff123 was re-scheduled: Binding failed for port 6eb0afc7-671f-4f09-86b2-4de081c5656d, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 574.260214] env[62109]: DEBUG nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 574.260431] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Acquiring lock "refresh_cache-a10c8e2e-9b5c-498e-81dc-ca69af0ff123" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.260593] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Acquired lock "refresh_cache-a10c8e2e-9b5c-498e-81dc-ca69af0ff123" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.260780] env[62109]: DEBUG nova.network.neutron [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 574.343068] env[62109]: DEBUG oslo_concurrency.lockutils [req-063b322f-50f1-42bb-9ec4-6b9d05c7c228 req-0e294db4-018b-4aa5-bc8d-b6aecd09e4e6 service nova] Releasing lock "refresh_cache-41eeecaa-8514-4550-989e-43db00dff6bb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.343068] env[62109]: DEBUG nova.compute.manager [req-063b322f-50f1-42bb-9ec4-6b9d05c7c228 req-0e294db4-018b-4aa5-bc8d-b6aecd09e4e6 service nova] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Received event network-vif-deleted-8a4d4c47-2ce5-46f4-b366-81723ff941c8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 574.795019] env[62109]: DEBUG nova.network.neutron [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 574.801167] env[62109]: ERROR nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 06c37800-4b43-44ae-b1b0-e1b65c1b680b, please check neutron logs for more information. 
[ 574.801167] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 574.801167] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.801167] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 574.801167] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 574.801167] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 574.801167] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 574.801167] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 574.801167] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.801167] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 574.801167] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.801167] env[62109]: ERROR nova.compute.manager raise self.value [ 574.801167] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 574.801167] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 574.801167] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.801167] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 574.801833] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.801833] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 574.801833] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 06c37800-4b43-44ae-b1b0-e1b65c1b680b, please check neutron logs for more information. 
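The exception is raised in _ensure_no_port_binding_failure (nova/network/neutron.py:294 in this tree) once Neutron reports the port's binding outcome. Paraphrased as a standalone sketch, not the actual Nova source, the check amounts to:

    # Paraphrase of the check behind the PortBindingFailed above: Neutron
    # records the binding outcome in the port's binding:vif_type field, and
    # 'binding_failed' means no mechanism driver could bind the port.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # 'port' is the dict returned by the Neutron ports API.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])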
[ 574.801833] env[62109]: ERROR nova.compute.manager [ 574.801833] env[62109]: Traceback (most recent call last): [ 574.801833] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 574.801833] env[62109]: listener.cb(fileno) [ 574.801833] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.801833] env[62109]: result = function(*args, **kwargs) [ 574.801833] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 574.801833] env[62109]: return func(*args, **kwargs) [ 574.801833] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 574.801833] env[62109]: raise e [ 574.801833] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.801833] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 574.801833] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 574.801833] env[62109]: created_port_ids = self._update_ports_for_instance( [ 574.801833] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 574.801833] env[62109]: with excutils.save_and_reraise_exception(): [ 574.801833] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.801833] env[62109]: self.force_reraise() [ 574.801833] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.801833] env[62109]: raise self.value [ 574.801833] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 574.801833] env[62109]: updated_port = self._update_port( [ 574.801833] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.801833] env[62109]: _ensure_no_port_binding_failure(port) [ 574.801833] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.801833] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 574.803348] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 06c37800-4b43-44ae-b1b0-e1b65c1b680b, please check neutron logs for more information. [ 574.803348] env[62109]: Removing descriptor: 15 [ 574.804666] env[62109]: ERROR nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 06c37800-4b43-44ae-b1b0-e1b65c1b680b, please check neutron logs for more information. 
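Since the message only points at the Neutron logs, one quick way to confirm that the failure is on the Neutron side is to read the binding fields straight off the port. A hedged sketch using openstacksdk; the cloud name is an assumption, the port id is the one reported above, and the actual root cause still has to come from the neutron-server and agent logs.

    # Diagnostic sketch (assumes an openstacksdk clouds.yaml entry named
    # 'devstack'); prints the binding fields of the port that failed above.
    import openstack

    conn = openstack.connect(cloud='devstack')                  # assumption
    port = conn.network.get_port('06c37800-4b43-44ae-b1b0-e1b65c1b680b')
    print(port.status)               # typically DOWN for an unbound port
    print(port.binding_vif_type)     # 'binding_failed' when binding failed
    print(port.binding_host_id)      # compute host the binding targeted
    print(port.binding_vif_details)  # driver-specific details, often empty here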
[ 574.804666] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Traceback (most recent call last): [ 574.804666] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 574.804666] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] yield resources [ 574.804666] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 574.804666] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] self.driver.spawn(context, instance, image_meta, [ 574.804666] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 574.804666] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 574.804666] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 574.804666] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] vm_ref = self.build_virtual_machine(instance, [ 574.804666] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 574.806107] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] vif_infos = vmwarevif.get_vif_info(self._session, [ 574.806107] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 574.806107] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] for vif in network_info: [ 574.806107] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 574.806107] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] return self._sync_wrapper(fn, *args, **kwargs) [ 574.806107] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 574.806107] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] self.wait() [ 574.806107] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 574.806107] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] self[:] = self._gt.wait() [ 574.806107] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 574.806107] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] return self._exit_event.wait() [ 574.806107] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 574.806107] env[62109]: ERROR 
nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] result = hub.switch() [ 574.806423] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 574.806423] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] return self.greenlet.switch() [ 574.806423] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.806423] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] result = function(*args, **kwargs) [ 574.806423] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 574.806423] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] return func(*args, **kwargs) [ 574.806423] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 574.806423] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] raise e [ 574.806423] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.806423] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] nwinfo = self.network_api.allocate_for_instance( [ 574.806423] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 574.806423] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] created_port_ids = self._update_ports_for_instance( [ 574.806423] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 574.806763] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] with excutils.save_and_reraise_exception(): [ 574.806763] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.806763] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] self.force_reraise() [ 574.806763] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.806763] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] raise self.value [ 574.806763] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 574.806763] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] updated_port = self._update_port( [ 574.806763] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.806763] 
env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] _ensure_no_port_binding_failure(port) [ 574.806763] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.806763] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] raise exception.PortBindingFailed(port_id=port['id']) [ 574.806763] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] nova.exception.PortBindingFailed: Binding failed for port 06c37800-4b43-44ae-b1b0-e1b65c1b680b, please check neutron logs for more information. [ 574.806763] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] [ 574.807199] env[62109]: INFO nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Terminating instance [ 574.816550] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Acquiring lock "refresh_cache-18d23737-e0e7-44c9-887e-6531acf496fa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.816550] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Acquired lock "refresh_cache-18d23737-e0e7-44c9-887e-6531acf496fa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.816685] env[62109]: DEBUG nova.network.neutron [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 574.855595] env[62109]: DEBUG nova.network.neutron [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.072126] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquiring lock "e50019d2-d9a1-4077-ba1a-7f7bde266058" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.072126] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Lock "e50019d2-d9a1-4077-ba1a-7f7bde266058" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.088057] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5668f9b1-fc56-4334-be7a-eda4bdd8f940 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.098640] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e57dde-b657-4ddd-bd11-3477db9494df {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.137788] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0198cb-3641-4456-a723-0de7a93a55e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.150447] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f8dccb-a934-4948-b889-bfbb87d3d8ac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.168548] env[62109]: DEBUG nova.compute.provider_tree [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.337558] env[62109]: DEBUG nova.network.neutron [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.363094] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Releasing lock "refresh_cache-a10c8e2e-9b5c-498e-81dc-ca69af0ff123" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.363094] env[62109]: DEBUG nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 575.363094] env[62109]: DEBUG nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 575.363094] env[62109]: DEBUG nova.network.neutron [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 575.386979] env[62109]: DEBUG nova.network.neutron [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.434568] env[62109]: DEBUG nova.network.neutron [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.674286] env[62109]: DEBUG nova.scheduler.client.report [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 575.776026] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "590e6f9a-b764-44b4-9117-3deff696a6aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.776026] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "590e6f9a-b764-44b4-9117-3deff696a6aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.888934] env[62109]: DEBUG nova.network.neutron [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Updating instance_info_cache 
with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.937722] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Releasing lock "refresh_cache-18d23737-e0e7-44c9-887e-6531acf496fa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.938079] env[62109]: DEBUG nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 575.938284] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 575.938572] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ae02500-c1ef-4b3b-b43d-b53961b646ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.948598] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd5b9c9-0dce-4877-b817-238827321573 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.973921] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 18d23737-e0e7-44c9-887e-6531acf496fa could not be found. [ 575.974099] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 575.974287] env[62109]: INFO nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Took 0.04 seconds to destroy the instance on the hypervisor. [ 575.974541] env[62109]: DEBUG oslo.service.loopingcall [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 575.974773] env[62109]: DEBUG nova.compute.manager [-] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 575.974883] env[62109]: DEBUG nova.network.neutron [-] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 575.991054] env[62109]: DEBUG nova.network.neutron [-] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.181812] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.925s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.182475] env[62109]: ERROR nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 186a3613-a077-4a81-8822-8c8b454bd666, please check neutron logs for more information. [ 576.182475] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Traceback (most recent call last): [ 576.182475] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 576.182475] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] self.driver.spawn(context, instance, image_meta, [ 576.182475] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 576.182475] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 576.182475] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 576.182475] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] vm_ref = self.build_virtual_machine(instance, [ 576.182475] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 576.182475] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] vif_infos = vmwarevif.get_vif_info(self._session, [ 576.182475] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 576.182873] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] for vif in network_info: [ 576.182873] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] 
File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 576.182873] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] return self._sync_wrapper(fn, *args, **kwargs) [ 576.182873] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 576.182873] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] self.wait() [ 576.182873] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 576.182873] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] self[:] = self._gt.wait() [ 576.182873] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 576.182873] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] return self._exit_event.wait() [ 576.182873] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 576.182873] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] result = hub.switch() [ 576.182873] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 576.182873] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] return self.greenlet.switch() [ 576.183376] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 576.183376] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] result = function(*args, **kwargs) [ 576.183376] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 576.183376] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] return func(*args, **kwargs) [ 576.183376] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 576.183376] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] raise e [ 576.183376] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.183376] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] nwinfo = self.network_api.allocate_for_instance( [ 576.183376] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 576.183376] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] created_port_ids = self._update_ports_for_instance( [ 576.183376] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/neutron.py", 
line 1365, in _update_ports_for_instance [ 576.183376] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] with excutils.save_and_reraise_exception(): [ 576.183376] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.183884] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] self.force_reraise() [ 576.183884] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.183884] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] raise self.value [ 576.183884] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 576.183884] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] updated_port = self._update_port( [ 576.183884] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.183884] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] _ensure_no_port_binding_failure(port) [ 576.183884] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 576.183884] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] raise exception.PortBindingFailed(port_id=port['id']) [ 576.183884] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] nova.exception.PortBindingFailed: Binding failed for port 186a3613-a077-4a81-8822-8c8b454bd666, please check neutron logs for more information. [ 576.183884] env[62109]: ERROR nova.compute.manager [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] [ 576.184236] env[62109]: DEBUG nova.compute.utils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Binding failed for port 186a3613-a077-4a81-8822-8c8b454bd666, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 576.184801] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 11.464s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.186463] env[62109]: DEBUG nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Build of instance b52ff4f8-2341-493e-8587-b8d1d12efb7e was re-scheduled: Binding failed for port 186a3613-a077-4a81-8822-8c8b454bd666, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 576.186872] env[62109]: DEBUG nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 576.187125] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Acquiring lock "refresh_cache-b52ff4f8-2341-493e-8587-b8d1d12efb7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.187276] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Acquired lock "refresh_cache-b52ff4f8-2341-493e-8587-b8d1d12efb7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.187434] env[62109]: DEBUG nova.network.neutron [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 576.380544] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "226938f5-f903-4671-b7a3-c6f5a264506e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.380544] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "226938f5-f903-4671-b7a3-c6f5a264506e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.392884] env[62109]: INFO nova.compute.manager [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] [instance: a10c8e2e-9b5c-498e-81dc-ca69af0ff123] Took 1.03 seconds to deallocate network for instance. [ 576.416320] env[62109]: DEBUG nova.compute.manager [req-594027fa-aaaf-44f9-949c-c3cd0f679830 req-85a8849a-1834-40d8-b5a1-7ed11eb9e036 service nova] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Received event network-changed-06c37800-4b43-44ae-b1b0-e1b65c1b680b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 576.416505] env[62109]: DEBUG nova.compute.manager [req-594027fa-aaaf-44f9-949c-c3cd0f679830 req-85a8849a-1834-40d8-b5a1-7ed11eb9e036 service nova] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Refreshing instance network info cache due to event network-changed-06c37800-4b43-44ae-b1b0-e1b65c1b680b. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 576.416712] env[62109]: DEBUG oslo_concurrency.lockutils [req-594027fa-aaaf-44f9-949c-c3cd0f679830 req-85a8849a-1834-40d8-b5a1-7ed11eb9e036 service nova] Acquiring lock "refresh_cache-18d23737-e0e7-44c9-887e-6531acf496fa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.416833] env[62109]: DEBUG oslo_concurrency.lockutils [req-594027fa-aaaf-44f9-949c-c3cd0f679830 req-85a8849a-1834-40d8-b5a1-7ed11eb9e036 service nova] Acquired lock "refresh_cache-18d23737-e0e7-44c9-887e-6531acf496fa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.417012] env[62109]: DEBUG nova.network.neutron [req-594027fa-aaaf-44f9-949c-c3cd0f679830 req-85a8849a-1834-40d8-b5a1-7ed11eb9e036 service nova] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Refreshing network info cache for port 06c37800-4b43-44ae-b1b0-e1b65c1b680b {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 576.493818] env[62109]: DEBUG nova.network.neutron [-] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.713885] env[62109]: DEBUG nova.network.neutron [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.794905] env[62109]: DEBUG nova.network.neutron [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.814010] env[62109]: DEBUG nova.compute.manager [None req-a486f4da-da42-4a0d-8667-b01c3e6e30f9 tempest-ServerDiagnosticsV248Test-1208550919 tempest-ServerDiagnosticsV248Test-1208550919-project-admin] [instance: 13988400-7b35-4175-9410-84eff918111d] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 576.815122] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d84d23-d63d-4d07-93e6-82276ea8de4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.824212] env[62109]: INFO nova.compute.manager [None req-a486f4da-da42-4a0d-8667-b01c3e6e30f9 tempest-ServerDiagnosticsV248Test-1208550919 tempest-ServerDiagnosticsV248Test-1208550919-project-admin] [instance: 13988400-7b35-4175-9410-84eff918111d] Retrieving diagnostics [ 576.824949] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20acd6be-cb91-4c3f-accd-393ed0104794 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.941645] env[62109]: DEBUG nova.network.neutron [req-594027fa-aaaf-44f9-949c-c3cd0f679830 req-85a8849a-1834-40d8-b5a1-7ed11eb9e036 service nova] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.996975] env[62109]: INFO nova.compute.manager [-] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Took 1.02 seconds to deallocate network for instance. [ 577.002036] env[62109]: DEBUG nova.compute.claims [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 577.002223] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.058658] env[62109]: DEBUG nova.network.neutron [req-594027fa-aaaf-44f9-949c-c3cd0f679830 req-85a8849a-1834-40d8-b5a1-7ed11eb9e036 service nova] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.218897] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 13988400-7b35-4175-9410-84eff918111d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 577.301867] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Releasing lock "refresh_cache-b52ff4f8-2341-493e-8587-b8d1d12efb7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.302586] env[62109]: DEBUG nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 577.302586] env[62109]: DEBUG nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 577.302758] env[62109]: DEBUG nova.network.neutron [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 577.317758] env[62109]: DEBUG nova.network.neutron [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 577.428253] env[62109]: INFO nova.scheduler.client.report [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Deleted allocations for instance a10c8e2e-9b5c-498e-81dc-ca69af0ff123 [ 577.561328] env[62109]: DEBUG oslo_concurrency.lockutils [req-594027fa-aaaf-44f9-949c-c3cd0f679830 req-85a8849a-1834-40d8-b5a1-7ed11eb9e036 service nova] Releasing lock "refresh_cache-18d23737-e0e7-44c9-887e-6531acf496fa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.722412] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance a10c8e2e-9b5c-498e-81dc-ca69af0ff123 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 577.722590] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 46bb583c-bc67-4b18-903d-afbbf8248691 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 577.824541] env[62109]: DEBUG nova.network.neutron [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.936771] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd70840c-4661-42b6-bd3f-63472355d01f tempest-ServerExternalEventsTest-598638162 tempest-ServerExternalEventsTest-598638162-project-member] Lock "a10c8e2e-9b5c-498e-81dc-ca69af0ff123" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.794s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.225829] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance b52ff4f8-2341-493e-8587-b8d1d12efb7e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 578.226174] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 578.226646] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 41eeecaa-8514-4550-989e-43db00dff6bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 578.226801] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 18d23737-e0e7-44c9-887e-6531acf496fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 578.327542] env[62109]: INFO nova.compute.manager [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] [instance: b52ff4f8-2341-493e-8587-b8d1d12efb7e] Took 1.02 seconds to deallocate network for instance. [ 578.441264] env[62109]: DEBUG nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 578.486234] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "f5f24014-2196-4c44-b947-a80ac75197de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.486460] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "f5f24014-2196-4c44-b947-a80ac75197de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.490431] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Acquiring lock "13988400-7b35-4175-9410-84eff918111d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.490695] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Lock "13988400-7b35-4175-9410-84eff918111d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.490838] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Acquiring lock "13988400-7b35-4175-9410-84eff918111d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.491030] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Lock "13988400-7b35-4175-9410-84eff918111d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.491214] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Lock "13988400-7b35-4175-9410-84eff918111d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.493066] env[62109]: INFO nova.compute.manager [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 
tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Terminating instance [ 578.494669] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Acquiring lock "refresh_cache-13988400-7b35-4175-9410-84eff918111d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.494823] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Acquired lock "refresh_cache-13988400-7b35-4175-9410-84eff918111d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.494987] env[62109]: DEBUG nova.network.neutron [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 578.730747] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 578.969872] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.022333] env[62109]: DEBUG nova.network.neutron [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 579.095193] env[62109]: DEBUG nova.network.neutron [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.240403] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 356e57cb-9e43-47e1-a02b-b81ff737883e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 579.364725] env[62109]: INFO nova.scheduler.client.report [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Deleted allocations for instance b52ff4f8-2341-493e-8587-b8d1d12efb7e [ 579.548678] env[62109]: DEBUG nova.compute.manager [req-4469615e-73a8-4472-ba37-157daf7644b9 req-e27b3b6e-4472-476e-b479-e1b7131adf8b service nova] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Received event network-vif-deleted-06c37800-4b43-44ae-b1b0-e1b65c1b680b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 579.598462] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Releasing lock "refresh_cache-13988400-7b35-4175-9410-84eff918111d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.598880] env[62109]: DEBUG nova.compute.manager [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 579.599977] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 579.600317] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63487c2e-5563-4942-bf68-aa6242413326 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.609230] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 579.609467] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b19eec6d-1b51-4b40-8109-5e6ffec4daad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.618046] env[62109]: DEBUG oslo_vmware.api [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 579.618046] env[62109]: value = "task-1116108" [ 579.618046] env[62109]: _type = "Task" [ 579.618046] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.627180] env[62109]: DEBUG oslo_vmware.api [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116108, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.743479] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 28e71e8c-2c47-4ea8-bd90-33eb064073e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 579.873712] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f90c0141-cbc3-4627-91cb-deafee88b19b tempest-ServerDiagnosticsNegativeTest-1022463319 tempest-ServerDiagnosticsNegativeTest-1022463319-project-member] Lock "b52ff4f8-2341-493e-8587-b8d1d12efb7e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.382s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.135739] env[62109]: DEBUG oslo_vmware.api [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116108, 'name': PowerOffVM_Task, 'duration_secs': 0.207235} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.136379] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 580.138530] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 580.138530] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f32ef0ac-c528-4d3f-97e8-af9cba450992 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.169483] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Acquiring lock "4c02989b-4638-41b8-bccb-f2605c883346" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.170609] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Lock "4c02989b-4638-41b8-bccb-f2605c883346" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.170609] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 
tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 580.171978] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 580.171978] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Deleting the datastore file [datastore2] 13988400-7b35-4175-9410-84eff918111d {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 580.171978] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74cddf77-d91c-403c-8454-1cdc40f83228 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.181763] env[62109]: DEBUG oslo_vmware.api [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for the task: (returnval){ [ 580.181763] env[62109]: value = "task-1116110" [ 580.181763] env[62109]: _type = "Task" [ 580.181763] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.193914] env[62109]: DEBUG oslo_vmware.api [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116110, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.247242] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 49137502-b0a0-49f8-affa-82f19caf34b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 580.379986] env[62109]: DEBUG nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 580.695656] env[62109]: DEBUG oslo_vmware.api [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Task: {'id': task-1116110, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10732} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.695924] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 580.696222] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 580.696297] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 580.699360] env[62109]: INFO nova.compute.manager [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] [instance: 13988400-7b35-4175-9410-84eff918111d] Took 1.10 seconds to destroy the instance on the hypervisor. [ 580.699360] env[62109]: DEBUG oslo.service.loopingcall [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 580.699360] env[62109]: DEBUG nova.compute.manager [-] [instance: 13988400-7b35-4175-9410-84eff918111d] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 580.699360] env[62109]: DEBUG nova.network.neutron [-] [instance: 13988400-7b35-4175-9410-84eff918111d] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 580.743457] env[62109]: DEBUG nova.network.neutron [-] [instance: 13988400-7b35-4175-9410-84eff918111d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 580.751745] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 58f76ca2-8f1b-4d9f-887b-1527ba70e91c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 580.908560] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.249057] env[62109]: DEBUG nova.network.neutron [-] [instance: 13988400-7b35-4175-9410-84eff918111d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.256136] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 21efb09c-8d90-415c-815a-af6ce6707c97 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 581.757506] env[62109]: INFO nova.compute.manager [-] [instance: 13988400-7b35-4175-9410-84eff918111d] Took 1.06 seconds to deallocate network for instance. [ 581.762841] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance a6d094c3-8488-4437-8972-aa246809a5b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 582.276787] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.277655] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 693e6fa3-4d5f-47aa-8543-32f21001b78f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 582.786261] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance b60d334c-0834-4267-bb31-1f3c679a2e1d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 583.296313] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance a05a3519-0395-4e49-b655-a6c6d7bd85a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 583.799390] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance e50019d2-d9a1-4077-ba1a-7f7bde266058 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 584.304688] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 590e6f9a-b764-44b4-9117-3deff696a6aa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 584.810976] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 226938f5-f903-4671-b7a3-c6f5a264506e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 584.811568] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 584.811568] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 585.174274] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b004d444-470b-48bb-aa39-451f360a1638 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.187301] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ea154c-a41f-4dd2-b725-b44acb53db97 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.225767] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a4f3d6-bbdf-477f-9cbb-597738d731b6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.235442] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6123ce9-6589-4cb3-a4cc-8c25c9c36777 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.253586] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.763709] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 586.267916] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 586.268269] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.083s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.268645] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.344s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.270507] env[62109]: INFO nova.compute.claims [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.434590] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquiring lock "934e3a1b-8d3f-4de0-ae8b-35b82d3859a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.434861] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Lock "934e3a1b-8d3f-4de0-ae8b-35b82d3859a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.705692] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca03287-ffa4-424c-903c-ae2a0a4a830a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.719651] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab096487-8c9b-4fd5-b0c2-de4e0c68384f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.752980] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d00705-1d7f-4c53-9a40-16b535b85fc7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.762072] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f7232a-6c63-4238-bd62-b746aeab5189 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.776770] env[62109]: DEBUG nova.compute.provider_tree [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.119129] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Acquiring lock "7ef14775-9be3-4275-b5ca-dad43aa61dd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.119346] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Lock "7ef14775-9be3-4275-b5ca-dad43aa61dd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.282465] env[62109]: DEBUG nova.scheduler.client.report [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 588.789548] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.521s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.790091] env[62109]: DEBUG nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 588.794695] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.578s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.304944] env[62109]: DEBUG nova.compute.utils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 589.309625] env[62109]: DEBUG nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 589.309799] env[62109]: DEBUG nova.network.neutron [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 589.432199] env[62109]: DEBUG nova.policy [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2ead56f5a21471faa89a36321ae54ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '31e378ebe02c4dde8eaeb3d13b5d02c3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 589.642650] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f597fd02-f7ca-45eb-94da-71786e805021 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.652700] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996cbbf7-5807-45cc-b40d-66fa249dcc95 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.691566] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb78e821-1163-43c9-979b-6d012f0dddcd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.700943] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01768a75-d00b-4bf2-b40e-666c1743c40d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.716036] env[62109]: DEBUG nova.compute.provider_tree [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.810197] env[62109]: DEBUG nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 590.222336] env[62109]: DEBUG nova.scheduler.client.report [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 590.604353] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Acquiring lock "17fd1633-f327-47e9-905f-60c8c7446c7e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.604587] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Lock "17fd1633-f327-47e9-905f-60c8c7446c7e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.678121] env[62109]: DEBUG nova.network.neutron [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Successfully created port: 6355869c-d5f0-4430-b8df-a229b415cb3c {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 590.731758] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.937s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.732462] env[62109]: ERROR nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c8e5852b-f51f-4721-bedd-abcab4c157ad, please check neutron logs for more information. 
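The inventory records logged around here report raw totals, reserved amounts, and allocation ratios for provider 574e9717-c25e-453d-8028-45d9e2f95398. As a reading aid, the usable capacity Placement derives from such a record is (total - reserved) * allocation_ratio; the short sketch below applies that formula to the values logged above (the `effective_capacity` helper is illustrative only, not Nova code).

```python
# Illustrative only: derive the capacity Placement would expose from the
# inventory record logged above for provider 574e9717-c25e-453d-8028-45d9e2f95398.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    # (total - reserved) * allocation_ratio, per resource class.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```

With a 4.0 VCPU allocation ratio the 48 physical vCPUs are exposed as 192 schedulable units, which is consistent with the many small tempest instances being claimed against this node at once.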
[ 590.732462] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Traceback (most recent call last): [ 590.732462] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 590.732462] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] self.driver.spawn(context, instance, image_meta, [ 590.732462] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 590.732462] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] self._vmops.spawn(context, instance, image_meta, injected_files, [ 590.732462] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 590.732462] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] vm_ref = self.build_virtual_machine(instance, [ 590.732462] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 590.732462] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] vif_infos = vmwarevif.get_vif_info(self._session, [ 590.732462] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 590.732850] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] for vif in network_info: [ 590.732850] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 590.732850] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] return self._sync_wrapper(fn, *args, **kwargs) [ 590.732850] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 590.732850] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] self.wait() [ 590.732850] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 590.732850] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] self[:] = self._gt.wait() [ 590.732850] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 590.732850] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] return self._exit_event.wait() [ 590.732850] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 590.732850] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] result = hub.switch() [ 590.732850] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
590.732850] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] return self.greenlet.switch() [ 590.733240] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 590.733240] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] result = function(*args, **kwargs) [ 590.733240] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 590.733240] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] return func(*args, **kwargs) [ 590.733240] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 590.733240] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] raise e [ 590.733240] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 590.733240] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] nwinfo = self.network_api.allocate_for_instance( [ 590.733240] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 590.733240] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] created_port_ids = self._update_ports_for_instance( [ 590.733240] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 590.733240] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] with excutils.save_and_reraise_exception(): [ 590.733240] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.733718] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] self.force_reraise() [ 590.733718] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.733718] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] raise self.value [ 590.733718] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 590.733718] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] updated_port = self._update_port( [ 590.733718] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.733718] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] _ensure_no_port_binding_failure(port) [ 590.733718] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 590.733718] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] raise exception.PortBindingFailed(port_id=port['id']) [ 590.733718] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] nova.exception.PortBindingFailed: Binding failed for port c8e5852b-f51f-4721-bedd-abcab4c157ad, please check neutron logs for more information. [ 590.733718] env[62109]: ERROR nova.compute.manager [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] [ 590.734116] env[62109]: DEBUG nova.compute.utils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Binding failed for port c8e5852b-f51f-4721-bedd-abcab4c157ad, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 590.735495] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.907s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.743429] env[62109]: DEBUG nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Build of instance 46bb583c-bc67-4b18-903d-afbbf8248691 was re-scheduled: Binding failed for port c8e5852b-f51f-4721-bedd-abcab4c157ad, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 590.743913] env[62109]: DEBUG nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 590.744241] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Acquiring lock "refresh_cache-46bb583c-bc67-4b18-903d-afbbf8248691" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.744288] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Acquired lock "refresh_cache-46bb583c-bc67-4b18-903d-afbbf8248691" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.744437] env[62109]: DEBUG nova.network.neutron [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 590.819996] env[62109]: DEBUG nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 590.848417] env[62109]: DEBUG nova.virt.hardware [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:52:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='213466023',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1275697750',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 590.849013] env[62109]: DEBUG nova.virt.hardware [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 590.849013] env[62109]: DEBUG nova.virt.hardware [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 590.849013] env[62109]: DEBUG nova.virt.hardware [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 590.849185] env[62109]: DEBUG nova.virt.hardware [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 590.849224] env[62109]: DEBUG nova.virt.hardware [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 590.849413] env[62109]: DEBUG nova.virt.hardware [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 590.850875] env[62109]: DEBUG nova.virt.hardware [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 
tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 590.850875] env[62109]: DEBUG nova.virt.hardware [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 590.850875] env[62109]: DEBUG nova.virt.hardware [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 590.850875] env[62109]: DEBUG nova.virt.hardware [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 590.851627] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54572db2-52ff-4e03-bcbe-55ce97eab64b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.861253] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80f9dc9-21ca-4655-9e7f-5a2ac9f32959 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.045850] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "307cf522-173e-4bd8-8535-e4a6db6aa430" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.046090] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "307cf522-173e-4bd8-8535-e4a6db6aa430" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.417494] env[62109]: DEBUG nova.network.neutron [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 591.632567] env[62109]: DEBUG nova.network.neutron [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.670195] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7dc8e8-6cf2-4dca-975a-9c9f994c1428 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.679896] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d78a42-40ac-4166-8505-7c8fa077a74c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.724703] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02c5518-7cfa-4266-9b36-59a412aac1ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.739727] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a83b7fe-5ba9-4779-8ee7-fc5b7bc4297d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.754870] env[62109]: DEBUG nova.compute.provider_tree [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.136078] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Releasing lock "refresh_cache-46bb583c-bc67-4b18-903d-afbbf8248691" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.136479] env[62109]: DEBUG nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 592.136479] env[62109]: DEBUG nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 592.136635] env[62109]: DEBUG nova.network.neutron [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 592.204779] env[62109]: DEBUG nova.network.neutron [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 592.260026] env[62109]: DEBUG nova.scheduler.client.report [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 592.707972] env[62109]: DEBUG nova.network.neutron [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.767785] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.032s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.768487] env[62109]: ERROR nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 29546bda-6d6d-4096-ad62-678eeaff25ce, please check neutron logs for more information. 
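The recurring `Lock "compute_resources" acquired by ... :: waited Ns` / `"released" ... :: held Ns` pairs in this log come from the oslo.concurrency lock wrapper that serializes the resource tracker's claim and abort paths on a host. A minimal sketch of the same pattern, with a hypothetical guarded function standing in for ResourceTracker.instance_claim():

```python
# Simplified illustration of the lock pattern behind the
# 'Lock "compute_resources" acquired by ... waited' DEBUG lines above;
# claim_resources() is a hypothetical stand-in for the real
# ResourceTracker.instance_claim() critical section.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Only one greenthread at a time may update tracked usage for this host.
    print(f'claiming resources for {instance_uuid}')

claim_resources('a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e')
```

The long "waited" values seen here (roughly 21-25 seconds) suggest many concurrent builds queuing on this single per-host lock rather than a problem with any individual claim.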
[ 592.768487] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Traceback (most recent call last): [ 592.768487] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 592.768487] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] self.driver.spawn(context, instance, image_meta, [ 592.768487] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 592.768487] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 592.768487] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 592.768487] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] vm_ref = self.build_virtual_machine(instance, [ 592.768487] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 592.768487] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] vif_infos = vmwarevif.get_vif_info(self._session, [ 592.768487] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 592.768861] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] for vif in network_info: [ 592.768861] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 592.768861] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] return self._sync_wrapper(fn, *args, **kwargs) [ 592.768861] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 592.768861] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] self.wait() [ 592.768861] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 592.768861] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] self[:] = self._gt.wait() [ 592.768861] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 592.768861] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] return self._exit_event.wait() [ 592.768861] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 592.768861] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] result = hub.switch() [ 592.768861] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
592.768861] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] return self.greenlet.switch() [ 592.769263] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 592.769263] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] result = function(*args, **kwargs) [ 592.769263] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 592.769263] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] return func(*args, **kwargs) [ 592.769263] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 592.769263] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] raise e [ 592.769263] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.769263] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] nwinfo = self.network_api.allocate_for_instance( [ 592.769263] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 592.769263] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] created_port_ids = self._update_ports_for_instance( [ 592.769263] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 592.769263] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] with excutils.save_and_reraise_exception(): [ 592.769263] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.769610] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] self.force_reraise() [ 592.769610] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.769610] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] raise self.value [ 592.769610] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 592.769610] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] updated_port = self._update_port( [ 592.769610] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.769610] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] _ensure_no_port_binding_failure(port) [ 592.769610] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 592.769610] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] raise exception.PortBindingFailed(port_id=port['id']) [ 592.769610] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] nova.exception.PortBindingFailed: Binding failed for port 29546bda-6d6d-4096-ad62-678eeaff25ce, please check neutron logs for more information. [ 592.769610] env[62109]: ERROR nova.compute.manager [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] [ 592.769976] env[62109]: DEBUG nova.compute.utils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Binding failed for port 29546bda-6d6d-4096-ad62-678eeaff25ce, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 592.773333] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.055s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.774851] env[62109]: INFO nova.compute.claims [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 592.777836] env[62109]: DEBUG nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Build of instance 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4 was re-scheduled: Binding failed for port 29546bda-6d6d-4096-ad62-678eeaff25ce, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 592.778309] env[62109]: DEBUG nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 592.778534] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Acquiring lock "refresh_cache-6a9e1c1d-8de1-4a40-b32b-e3adf445ece4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.778678] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Acquired lock "refresh_cache-6a9e1c1d-8de1-4a40-b32b-e3adf445ece4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.778834] env[62109]: DEBUG nova.network.neutron [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 593.213314] env[62109]: INFO nova.compute.manager [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] [instance: 46bb583c-bc67-4b18-903d-afbbf8248691] Took 1.08 seconds to deallocate network for instance. [ 593.335072] env[62109]: DEBUG nova.network.neutron [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.483571] env[62109]: ERROR nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6355869c-d5f0-4430-b8df-a229b415cb3c, please check neutron logs for more information. 
[ 593.483571] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 593.483571] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 593.483571] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 593.483571] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 593.483571] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 593.483571] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 593.483571] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 593.483571] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.483571] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 593.483571] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.483571] env[62109]: ERROR nova.compute.manager raise self.value [ 593.483571] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 593.483571] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 593.483571] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.483571] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 593.484201] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.484201] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 593.484201] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6355869c-d5f0-4430-b8df-a229b415cb3c, please check neutron logs for more information. 
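The tracebacks above and below all end in the same place: `_update_port()` calls `_ensure_no_port_binding_failure(port)`, which raises `PortBindingFailed` once Neutron returns the port with a failed binding. A condensed sketch of that check follows; the classes are simplified stand-ins rather than the real nova.exception types, and the 'binding_failed' vif_type value is assumed from the usual Neutron convention.

```python
# Condensed from the call chain shown in the surrounding tracebacks
# (_update_port -> _ensure_no_port_binding_failure). Simplified stand-in
# classes; 'binding_failed' is the conventional vif_type Neutron reports
# when no mechanism driver could bind the port.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f'Binding failed for port {port_id}, '
                         'please check neutron logs for more information.')

def ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

port = {'id': '6355869c-d5f0-4430-b8df-a229b415cb3c',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # same message format as the errors logged here
```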
[ 593.484201] env[62109]: ERROR nova.compute.manager [ 593.484201] env[62109]: Traceback (most recent call last): [ 593.484201] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 593.484201] env[62109]: listener.cb(fileno) [ 593.484201] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.484201] env[62109]: result = function(*args, **kwargs) [ 593.484201] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 593.484201] env[62109]: return func(*args, **kwargs) [ 593.484201] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 593.484201] env[62109]: raise e [ 593.484201] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 593.484201] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 593.484201] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 593.484201] env[62109]: created_port_ids = self._update_ports_for_instance( [ 593.484201] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 593.484201] env[62109]: with excutils.save_and_reraise_exception(): [ 593.484201] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.484201] env[62109]: self.force_reraise() [ 593.484201] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.484201] env[62109]: raise self.value [ 593.484201] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 593.484201] env[62109]: updated_port = self._update_port( [ 593.484201] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.484201] env[62109]: _ensure_no_port_binding_failure(port) [ 593.484201] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.484201] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 593.485577] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 6355869c-d5f0-4430-b8df-a229b415cb3c, please check neutron logs for more information. [ 593.485577] env[62109]: Removing descriptor: 16 [ 593.485577] env[62109]: ERROR nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6355869c-d5f0-4430-b8df-a229b415cb3c, please check neutron logs for more information. 
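A single failed build produces several interleaved entries here: the compute-manager error, the raw greenthread traceback that follows, and then the terminate and cleanup path for instance a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e. When reading a log in this shape it can help to group lines by request ID; the regex below is an assumption about the line layout seen in this file, not part of Nova.

```python
# Assumption-based helper for reading this log: group entries by request ID
# so that, e.g., all req-75ecc396 lines (claim, network allocation, failure,
# terminate) can be viewed together. The regex mirrors the line layout seen
# in this file.
import re

LINE = re.compile(
    r'\[\s*(?P<ts>\d+\.\d+)\] env\[\d+\]: (?P<level>DEBUG|INFO|WARNING|ERROR) '
    r'(?P<logger>\S+) \[(?P<ctx>[^\]]*)\]')

def group_by_request(lines):
    grouped = {}
    for line in lines:
        m = LINE.search(line)
        if not m:
            continue
        req = next((tok for tok in m.group('ctx').split()
                    if tok.startswith('req-')), 'unscoped')
        grouped.setdefault(req, []).append((float(m.group('ts')),
                                            m.group('level'),
                                            m.group('logger')))
    return grouped

sample = ('[ 593.483571] env[62109]: ERROR nova.compute.manager '
          '[None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b '
          'tempest-ServersWithSpecificFlavorTestJSON-252485133 '
          'tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] '
          'Instance failed network setup after 1 attempt(s)')
print(group_by_request([sample]))
```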
[ 593.485577] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Traceback (most recent call last): [ 593.485577] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 593.485577] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] yield resources [ 593.485577] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 593.485577] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] self.driver.spawn(context, instance, image_meta, [ 593.485577] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 593.485577] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 593.485577] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 593.485577] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] vm_ref = self.build_virtual_machine(instance, [ 593.488381] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 593.488381] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] vif_infos = vmwarevif.get_vif_info(self._session, [ 593.488381] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 593.488381] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] for vif in network_info: [ 593.488381] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 593.488381] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] return self._sync_wrapper(fn, *args, **kwargs) [ 593.488381] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 593.488381] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] self.wait() [ 593.488381] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 593.488381] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] self[:] = self._gt.wait() [ 593.488381] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 593.488381] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] return self._exit_event.wait() [ 593.488381] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 593.488850] env[62109]: ERROR 
nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] result = hub.switch() [ 593.488850] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 593.488850] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] return self.greenlet.switch() [ 593.488850] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.488850] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] result = function(*args, **kwargs) [ 593.488850] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 593.488850] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] return func(*args, **kwargs) [ 593.488850] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 593.488850] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] raise e [ 593.488850] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 593.488850] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] nwinfo = self.network_api.allocate_for_instance( [ 593.488850] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 593.488850] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] created_port_ids = self._update_ports_for_instance( [ 593.489246] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 593.489246] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] with excutils.save_and_reraise_exception(): [ 593.489246] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.489246] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] self.force_reraise() [ 593.489246] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.489246] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] raise self.value [ 593.489246] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 593.489246] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] updated_port = self._update_port( [ 593.489246] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.489246] 
env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] _ensure_no_port_binding_failure(port) [ 593.489246] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.489246] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] raise exception.PortBindingFailed(port_id=port['id']) [ 593.490750] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] nova.exception.PortBindingFailed: Binding failed for port 6355869c-d5f0-4430-b8df-a229b415cb3c, please check neutron logs for more information. [ 593.490750] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] [ 593.490750] env[62109]: INFO nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Terminating instance [ 593.490750] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Acquiring lock "refresh_cache-a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.490750] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Acquired lock "refresh_cache-a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.490750] env[62109]: DEBUG nova.network.neutron [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 593.749588] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "02f52fdd-ece0-43a5-b7fd-be4172093698" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.750458] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "02f52fdd-ece0-43a5-b7fd-be4172093698" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.771458] env[62109]: DEBUG nova.network.neutron [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Updating instance_info_cache with 
network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.796337] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "17ee49a9-d980-46c0-996e-6a43c80be434" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.797112] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "17ee49a9-d980-46c0-996e-6a43c80be434" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.837161] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "46aa78cc-ea0a-4c1b-aadb-f2a4856c9371" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.837378] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "46aa78cc-ea0a-4c1b-aadb-f2a4856c9371" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.064924] env[62109]: DEBUG nova.network.neutron [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 594.251768] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903cd5e7-eae6-48a0-bfe4-4ba855f9ab94 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.257648] env[62109]: INFO nova.scheduler.client.report [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Deleted allocations for instance 46bb583c-bc67-4b18-903d-afbbf8248691 [ 594.269756] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eaa4193-5918-49a1-8f41-cba1f6367d44 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.274579] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Releasing lock "refresh_cache-6a9e1c1d-8de1-4a40-b32b-e3adf445ece4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.275064] env[62109]: DEBUG nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 594.275064] env[62109]: DEBUG nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 594.275064] env[62109]: DEBUG nova.network.neutron [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 594.306582] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61600fe6-896d-43ed-99d9-8ee7076a6085 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.311311] env[62109]: DEBUG nova.compute.manager [req-7dfb5d1d-9885-474e-b11e-045392f6d7bc req-a2f73a11-731a-425c-b318-c6d118c1095e service nova] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Received event network-changed-6355869c-d5f0-4430-b8df-a229b415cb3c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 594.311492] env[62109]: DEBUG nova.compute.manager [req-7dfb5d1d-9885-474e-b11e-045392f6d7bc req-a2f73a11-731a-425c-b318-c6d118c1095e service nova] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Refreshing instance network info cache due to event network-changed-6355869c-d5f0-4430-b8df-a229b415cb3c. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 594.311681] env[62109]: DEBUG oslo_concurrency.lockutils [req-7dfb5d1d-9885-474e-b11e-045392f6d7bc req-a2f73a11-731a-425c-b318-c6d118c1095e service nova] Acquiring lock "refresh_cache-a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.319520] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bc8503-88b5-4523-b492-2acea4842c1f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.335905] env[62109]: DEBUG nova.compute.provider_tree [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.356514] env[62109]: DEBUG nova.network.neutron [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 594.442899] env[62109]: DEBUG nova.network.neutron [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.775766] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da393a38-36b0-46d3-858e-eefa8474bf29 tempest-ServersAdminNegativeTestJSON-82295396 tempest-ServersAdminNegativeTestJSON-82295396-project-member] Lock "46bb583c-bc67-4b18-903d-afbbf8248691" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.188s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.839197] env[62109]: DEBUG nova.scheduler.client.report [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 594.860319] env[62109]: DEBUG nova.network.neutron [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.946303] env[62109]: DEBUG oslo_concurrency.lockutils 
[None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Releasing lock "refresh_cache-a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.947013] env[62109]: DEBUG nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 594.947224] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 594.947601] env[62109]: DEBUG oslo_concurrency.lockutils [req-7dfb5d1d-9885-474e-b11e-045392f6d7bc req-a2f73a11-731a-425c-b318-c6d118c1095e service nova] Acquired lock "refresh_cache-a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.947705] env[62109]: DEBUG nova.network.neutron [req-7dfb5d1d-9885-474e-b11e-045392f6d7bc req-a2f73a11-731a-425c-b318-c6d118c1095e service nova] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Refreshing network info cache for port 6355869c-d5f0-4430-b8df-a229b415cb3c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 594.948737] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0a09e9f3-2430-413c-a658-7ec781c311e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.960026] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844b43e3-2496-4a7a-a956-65b3b5fd998c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.985565] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e could not be found. [ 594.985881] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 594.986187] env[62109]: INFO nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Took 0.04 seconds to destroy the instance on the hypervisor. 
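The teardown above is triggered by the guard visible in the earlier traceback (nova/network/neutron.py, _ensure_no_port_binding_failure): once Neutron reports that it could not bind the port, Nova raises PortBindingFailed instead of continuing the build, and the compute manager terminates the half-built instance. A minimal, self-contained sketch of that guard follows; the stand-in exception class and the assumption that a failed binding shows up as binding:vif_type == 'binding_failed' on the port dict are illustrative, not copied from the Nova source.

    # Hedged sketch of the check that produced the traceback above.
    # Assumption: Neutron marks an unbindable port with
    # port['binding:vif_type'] == 'binding_failed'.
    class PortBindingFailed(Exception):  # stand-in for nova.exception.PortBindingFailed
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example with the port id from the log entries above:
    # ensure_no_port_binding_failure(
    #     {'id': '6355869c-d5f0-4430-b8df-a229b415cb3c',
    #      'binding:vif_type': 'binding_failed'})
    # raises PortBindingFailed with the same message that Nova logged.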
[ 594.986648] env[62109]: DEBUG oslo.service.loopingcall [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.987241] env[62109]: DEBUG nova.compute.manager [-] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 594.987481] env[62109]: DEBUG nova.network.neutron [-] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 595.231117] env[62109]: DEBUG nova.network.neutron [-] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 595.279079] env[62109]: DEBUG nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 595.347753] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.574s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.348326] env[62109]: DEBUG nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 595.355657] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.836s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.358955] env[62109]: INFO nova.compute.claims [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.362386] env[62109]: INFO nova.compute.manager [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] [instance: 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4] Took 1.09 seconds to deallocate network for instance. 
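The repeated "Acquiring lock ... by ...", "acquired ... waited N s" and "released ... held N s" triplets throughout this stretch (for example the compute_resources lock held 2.574 s above) come from oslo.concurrency's lockutils wrapper around the decorated function. Below is a minimal sketch of that usage pattern; only the lock name is taken from the log, while the function name and body are illustrative.

    # Minimal sketch of the locking pattern behind the lockutils log lines.
    # The lock name "compute_resources" is taken from the log; the function
    # is a placeholder for the resource-tracker work done under the lock.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # While this body runs, other callers block; lockutils logs how long
        # each caller waited for the lock and how long it was held.
        pass

    claim_resources()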
[ 595.539050] env[62109]: DEBUG nova.network.neutron [req-7dfb5d1d-9885-474e-b11e-045392f6d7bc req-a2f73a11-731a-425c-b318-c6d118c1095e service nova] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 595.734754] env[62109]: DEBUG nova.network.neutron [-] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.817098] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.836171] env[62109]: DEBUG nova.network.neutron [req-7dfb5d1d-9885-474e-b11e-045392f6d7bc req-a2f73a11-731a-425c-b318-c6d118c1095e service nova] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.857120] env[62109]: DEBUG nova.compute.utils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 595.859130] env[62109]: DEBUG nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 595.859130] env[62109]: DEBUG nova.network.neutron [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 596.031746] env[62109]: DEBUG nova.policy [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e711f5c587844af58112c6c2d24eb25a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '970f413ca7594369a0bddf71337e2aff', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 596.241378] env[62109]: INFO nova.compute.manager [-] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Took 1.25 seconds to deallocate network for instance. 
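The inventory payload that the report client keeps logging for provider 574e9717-c25e-453d-8028-45d9e2f95398 describes per-resource-class capacity. Under the usual Placement interpretation (stated here as an assumption, not quoted from the source), the schedulable amount is (total - reserved) * allocation_ratio, and max_unit caps what a single instance may claim. A small worked example with the numbers from those entries:

    # Capacity implied by the inventory dict logged above, assuming
    # schedulable = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, int(capacity))
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400
    # max_unit in the log (16 VCPU, 65530 MB, 171 GB) further limits any single allocation.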
[ 596.242405] env[62109]: DEBUG nova.compute.claims [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 596.242599] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.344439] env[62109]: DEBUG oslo_concurrency.lockutils [req-7dfb5d1d-9885-474e-b11e-045392f6d7bc req-a2f73a11-731a-425c-b318-c6d118c1095e service nova] Releasing lock "refresh_cache-a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.366389] env[62109]: DEBUG nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 596.408411] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "d727d597-c4ac-426e-bdc3-fc4f73a3eac9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.409963] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "d727d597-c4ac-426e-bdc3-fc4f73a3eac9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.424205] env[62109]: INFO nova.scheduler.client.report [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Deleted allocations for instance 6a9e1c1d-8de1-4a40-b32b-e3adf445ece4 [ 596.852041] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d931957-0ad6-434a-8bfd-594d0e71d52b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.862344] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb62986b-9584-4b6e-bec4-f3cbe5332765 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.904309] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc55296-2d44-411a-8d80-e7cf6bde2364 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.916637] 
env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f6f84e-f2d2-4956-8e4f-139da3f343cd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.931421] env[62109]: DEBUG nova.compute.provider_tree [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 596.936653] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ed658c0b-b887-4faa-8c8f-bda7ffd4c6fa tempest-ServerDiagnosticsTest-1834000052 tempest-ServerDiagnosticsTest-1834000052-project-member] Lock "6a9e1c1d-8de1-4a40-b32b-e3adf445ece4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.328s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.087164] env[62109]: DEBUG nova.network.neutron [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Successfully created port: 41fc812d-5c4e-409c-bfa2-2fa230b4beea {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 597.411953] env[62109]: DEBUG nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 597.437170] env[62109]: DEBUG nova.scheduler.client.report [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 597.439074] env[62109]: DEBUG nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 597.446751] env[62109]: DEBUG nova.virt.hardware [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 597.446986] env[62109]: DEBUG nova.virt.hardware [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 597.447150] env[62109]: DEBUG nova.virt.hardware [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 597.447325] env[62109]: DEBUG nova.virt.hardware [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 597.447529] env[62109]: DEBUG nova.virt.hardware [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 597.447596] env[62109]: DEBUG nova.virt.hardware [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 597.447793] env[62109]: DEBUG nova.virt.hardware [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 597.447977] env[62109]: DEBUG nova.virt.hardware [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 597.448173] env[62109]: DEBUG 
nova.virt.hardware [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 597.448334] env[62109]: DEBUG nova.virt.hardware [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 597.448502] env[62109]: DEBUG nova.virt.hardware [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 597.449682] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d906ab-74d9-4a12-acba-2be38e370389 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.462013] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35bc65e6-f744-4cab-b842-625552409410 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.948427] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.592s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.948427] env[62109]: DEBUG nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 597.954534] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.497s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.978890] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.086261] env[62109]: DEBUG nova.compute.manager [req-bd8712a5-c5e2-46cb-80dd-c387da531df6 req-0f28605b-ce7f-461a-8d82-74042680fcee service nova] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Received event network-vif-deleted-6355869c-d5f0-4430-b8df-a229b415cb3c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 598.354277] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.354277] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.357227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Acquiring lock "f6d3a50c-bcc3-4a6f-969f-4e629646f427" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.357555] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lock "f6d3a50c-bcc3-4a6f-969f-4e629646f427" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.461525] env[62109]: DEBUG nova.compute.utils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 598.462928] env[62109]: DEBUG nova.compute.manager [None 
req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 598.463142] env[62109]: DEBUG nova.network.neutron [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 598.537929] env[62109]: DEBUG nova.policy [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b8ac755b9de4977a964f3b6cfe09416', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '885390a7b4a94a9381d906709ff2d585', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 598.970597] env[62109]: DEBUG nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 598.987844] env[62109]: DEBUG nova.network.neutron [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Successfully created port: 8c87bcce-c05b-48d6-8366-75225b049447 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 598.991368] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73dc67b9-ac5b-40d7-85b6-9ff5f5444076 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.001557] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab97c3a-e193-4a6c-aa7e-56d28b0856d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.036882] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c183abe7-3cd9-4baf-b16b-9011670b4ca3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.045112] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4401af9-ef73-455e-adb8-5f12ad944a39 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.060920] env[62109]: DEBUG nova.compute.provider_tree [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Inventory has not changed in 
ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.276137] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Acquiring lock "8d9a7696-0465-4895-9ce8-4b4b8b2ca59e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.276407] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Lock "8d9a7696-0465-4895-9ce8-4b4b8b2ca59e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.565359] env[62109]: DEBUG nova.scheduler.client.report [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 599.652081] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "c753a2db-d701-4508-88bd-4ebe4f32a075" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.653031] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "c753a2db-d701-4508-88bd-4ebe4f32a075" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.670770] env[62109]: ERROR nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 41fc812d-5c4e-409c-bfa2-2fa230b4beea, please check neutron logs for more information. 
[ 599.670770] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 599.670770] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 599.670770] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 599.670770] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 599.670770] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 599.670770] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 599.670770] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 599.670770] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 599.670770] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 599.670770] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 599.670770] env[62109]: ERROR nova.compute.manager raise self.value [ 599.670770] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 599.670770] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 599.670770] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 599.670770] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 599.671398] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 599.671398] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 599.671398] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 41fc812d-5c4e-409c-bfa2-2fa230b4beea, please check neutron logs for more information. 
[ 599.671398] env[62109]: ERROR nova.compute.manager [ 599.671398] env[62109]: Traceback (most recent call last): [ 599.671398] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 599.671398] env[62109]: listener.cb(fileno) [ 599.671398] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 599.671398] env[62109]: result = function(*args, **kwargs) [ 599.671398] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 599.671398] env[62109]: return func(*args, **kwargs) [ 599.671398] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 599.671398] env[62109]: raise e [ 599.671398] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 599.671398] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 599.671398] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 599.671398] env[62109]: created_port_ids = self._update_ports_for_instance( [ 599.671398] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 599.671398] env[62109]: with excutils.save_and_reraise_exception(): [ 599.671398] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 599.671398] env[62109]: self.force_reraise() [ 599.671398] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 599.671398] env[62109]: raise self.value [ 599.671398] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 599.671398] env[62109]: updated_port = self._update_port( [ 599.671398] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 599.671398] env[62109]: _ensure_no_port_binding_failure(port) [ 599.671398] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 599.671398] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 599.672253] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 41fc812d-5c4e-409c-bfa2-2fa230b4beea, please check neutron logs for more information. [ 599.672253] env[62109]: Removing descriptor: 15 [ 599.672253] env[62109]: ERROR nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 41fc812d-5c4e-409c-bfa2-2fa230b4beea, please check neutron logs for more information. 
[ 599.672253] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Traceback (most recent call last): [ 599.672253] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 599.672253] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] yield resources [ 599.672253] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 599.672253] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] self.driver.spawn(context, instance, image_meta, [ 599.672253] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 599.672253] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 599.672253] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 599.672253] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] vm_ref = self.build_virtual_machine(instance, [ 599.672588] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 599.672588] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] vif_infos = vmwarevif.get_vif_info(self._session, [ 599.672588] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 599.672588] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] for vif in network_info: [ 599.672588] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 599.672588] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] return self._sync_wrapper(fn, *args, **kwargs) [ 599.672588] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 599.672588] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] self.wait() [ 599.672588] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 599.672588] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] self[:] = self._gt.wait() [ 599.672588] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 599.672588] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] return self._exit_event.wait() [ 599.672588] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 599.672978] env[62109]: ERROR 
nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] result = hub.switch() [ 599.672978] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 599.672978] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] return self.greenlet.switch() [ 599.672978] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 599.672978] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] result = function(*args, **kwargs) [ 599.672978] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 599.672978] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] return func(*args, **kwargs) [ 599.672978] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 599.672978] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] raise e [ 599.672978] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 599.672978] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] nwinfo = self.network_api.allocate_for_instance( [ 599.672978] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 599.672978] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] created_port_ids = self._update_ports_for_instance( [ 599.673440] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 599.673440] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] with excutils.save_and_reraise_exception(): [ 599.673440] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 599.673440] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] self.force_reraise() [ 599.673440] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 599.673440] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] raise self.value [ 599.673440] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 599.673440] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] updated_port = self._update_port( [ 599.673440] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 599.673440] 
env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] _ensure_no_port_binding_failure(port) [ 599.673440] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 599.673440] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] raise exception.PortBindingFailed(port_id=port['id']) [ 599.673776] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] nova.exception.PortBindingFailed: Binding failed for port 41fc812d-5c4e-409c-bfa2-2fa230b4beea, please check neutron logs for more information. [ 599.673776] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] [ 599.673776] env[62109]: INFO nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Terminating instance [ 599.675958] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Acquiring lock "refresh_cache-356e57cb-9e43-47e1-a02b-b81ff737883e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.676443] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Acquired lock "refresh_cache-356e57cb-9e43-47e1-a02b-b81ff737883e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.676443] env[62109]: DEBUG nova.network.neutron [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 599.983482] env[62109]: DEBUG nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 600.008923] env[62109]: DEBUG nova.virt.hardware [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 600.009225] env[62109]: DEBUG nova.virt.hardware [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 600.009502] env[62109]: DEBUG nova.virt.hardware [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.009618] env[62109]: DEBUG nova.virt.hardware [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 600.009800] env[62109]: DEBUG nova.virt.hardware [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.009979] env[62109]: DEBUG nova.virt.hardware [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 600.010236] env[62109]: DEBUG nova.virt.hardware [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 600.010427] env[62109]: DEBUG nova.virt.hardware [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
600.010620] env[62109]: DEBUG nova.virt.hardware [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 600.010810] env[62109]: DEBUG nova.virt.hardware [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 600.011818] env[62109]: DEBUG nova.virt.hardware [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 600.012259] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60259ce9-e03f-493c-b4e6-64e1056dca8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.021987] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01492274-68f9-4c81-b3e5-51dc4f531ec6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.070782] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.119s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.072252] env[62109]: ERROR nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8a4d4c47-2ce5-46f4-b366-81723ff941c8, please check neutron logs for more information. 
[ 600.072252] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Traceback (most recent call last): [ 600.072252] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 600.072252] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] self.driver.spawn(context, instance, image_meta, [ 600.072252] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 600.072252] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 600.072252] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 600.072252] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] vm_ref = self.build_virtual_machine(instance, [ 600.072252] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 600.072252] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] vif_infos = vmwarevif.get_vif_info(self._session, [ 600.072252] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 600.072688] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] for vif in network_info: [ 600.072688] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 600.072688] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] return self._sync_wrapper(fn, *args, **kwargs) [ 600.072688] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 600.072688] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] self.wait() [ 600.072688] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 600.072688] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] self[:] = self._gt.wait() [ 600.072688] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 600.072688] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] return self._exit_event.wait() [ 600.072688] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 600.072688] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] result = hub.switch() [ 600.072688] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
600.072688] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] return self.greenlet.switch() [ 600.073133] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 600.073133] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] result = function(*args, **kwargs) [ 600.073133] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 600.073133] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] return func(*args, **kwargs) [ 600.073133] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 600.073133] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] raise e [ 600.073133] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 600.073133] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] nwinfo = self.network_api.allocate_for_instance( [ 600.073133] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 600.073133] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] created_port_ids = self._update_ports_for_instance( [ 600.073133] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 600.073133] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] with excutils.save_and_reraise_exception(): [ 600.073133] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.073568] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] self.force_reraise() [ 600.073568] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.073568] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] raise self.value [ 600.073568] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 600.073568] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] updated_port = self._update_port( [ 600.073568] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.073568] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] _ensure_no_port_binding_failure(port) [ 600.073568] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 600.073568] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] raise exception.PortBindingFailed(port_id=port['id']) [ 600.073568] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] nova.exception.PortBindingFailed: Binding failed for port 8a4d4c47-2ce5-46f4-b366-81723ff941c8, please check neutron logs for more information. [ 600.073568] env[62109]: ERROR nova.compute.manager [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] [ 600.076817] env[62109]: DEBUG nova.compute.utils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Binding failed for port 8a4d4c47-2ce5-46f4-b366-81723ff941c8, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 600.076817] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.994s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.080460] env[62109]: INFO nova.compute.claims [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.081077] env[62109]: DEBUG nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Build of instance 41eeecaa-8514-4550-989e-43db00dff6bb was re-scheduled: Binding failed for port 8a4d4c47-2ce5-46f4-b366-81723ff941c8, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 600.082375] env[62109]: DEBUG nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 600.083552] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Acquiring lock "refresh_cache-41eeecaa-8514-4550-989e-43db00dff6bb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.083809] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Acquired lock "refresh_cache-41eeecaa-8514-4550-989e-43db00dff6bb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.084264] env[62109]: DEBUG nova.network.neutron [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 600.161257] env[62109]: DEBUG nova.compute.manager [req-f9f77d61-67e1-49e4-ad2e-d5e1709cc910 req-340860cb-1cd2-4a12-879e-794408b139f9 service nova] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Received event network-changed-41fc812d-5c4e-409c-bfa2-2fa230b4beea {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 600.161507] env[62109]: DEBUG nova.compute.manager [req-f9f77d61-67e1-49e4-ad2e-d5e1709cc910 req-340860cb-1cd2-4a12-879e-794408b139f9 service nova] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Refreshing instance network info cache due to event network-changed-41fc812d-5c4e-409c-bfa2-2fa230b4beea. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 600.161711] env[62109]: DEBUG oslo_concurrency.lockutils [req-f9f77d61-67e1-49e4-ad2e-d5e1709cc910 req-340860cb-1cd2-4a12-879e-794408b139f9 service nova] Acquiring lock "refresh_cache-356e57cb-9e43-47e1-a02b-b81ff737883e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.193542] env[62109]: DEBUG nova.network.neutron [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 600.429276] env[62109]: DEBUG nova.network.neutron [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.635118] env[62109]: DEBUG nova.network.neutron [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 600.700960] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "1399f618-3a93-4731-a59b-f98306d6cd52" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.701200] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.729319] env[62109]: ERROR nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8c87bcce-c05b-48d6-8366-75225b049447, please check neutron logs for more information. 
[ 600.729319] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 600.729319] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 600.729319] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 600.729319] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 600.729319] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 600.729319] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 600.729319] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 600.729319] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.729319] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 600.729319] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.729319] env[62109]: ERROR nova.compute.manager raise self.value [ 600.729319] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 600.729319] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 600.729319] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.729319] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 600.729860] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 600.729860] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 600.729860] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8c87bcce-c05b-48d6-8366-75225b049447, please check neutron logs for more information. 
[ 600.729860] env[62109]: ERROR nova.compute.manager [ 600.729860] env[62109]: Traceback (most recent call last): [ 600.729860] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 600.729860] env[62109]: listener.cb(fileno) [ 600.729860] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 600.729860] env[62109]: result = function(*args, **kwargs) [ 600.729860] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 600.729860] env[62109]: return func(*args, **kwargs) [ 600.729860] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 600.729860] env[62109]: raise e [ 600.729860] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 600.729860] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 600.729860] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 600.729860] env[62109]: created_port_ids = self._update_ports_for_instance( [ 600.729860] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 600.729860] env[62109]: with excutils.save_and_reraise_exception(): [ 600.729860] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.729860] env[62109]: self.force_reraise() [ 600.729860] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.729860] env[62109]: raise self.value [ 600.729860] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 600.729860] env[62109]: updated_port = self._update_port( [ 600.729860] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.729860] env[62109]: _ensure_no_port_binding_failure(port) [ 600.729860] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 600.729860] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 600.730808] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 8c87bcce-c05b-48d6-8366-75225b049447, please check neutron logs for more information. [ 600.730808] env[62109]: Removing descriptor: 16 [ 600.730808] env[62109]: ERROR nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8c87bcce-c05b-48d6-8366-75225b049447, please check neutron logs for more information. 
[ 600.730808] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Traceback (most recent call last): [ 600.730808] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 600.730808] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] yield resources [ 600.730808] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 600.730808] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] self.driver.spawn(context, instance, image_meta, [ 600.730808] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 600.730808] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 600.730808] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 600.730808] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] vm_ref = self.build_virtual_machine(instance, [ 600.731235] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 600.731235] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] vif_infos = vmwarevif.get_vif_info(self._session, [ 600.731235] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 600.731235] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] for vif in network_info: [ 600.731235] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 600.731235] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] return self._sync_wrapper(fn, *args, **kwargs) [ 600.731235] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 600.731235] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] self.wait() [ 600.731235] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 600.731235] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] self[:] = self._gt.wait() [ 600.731235] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 600.731235] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] return self._exit_event.wait() [ 600.731235] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 600.731699] env[62109]: ERROR 
nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] result = hub.switch() [ 600.731699] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 600.731699] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] return self.greenlet.switch() [ 600.731699] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 600.731699] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] result = function(*args, **kwargs) [ 600.731699] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 600.731699] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] return func(*args, **kwargs) [ 600.731699] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 600.731699] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] raise e [ 600.731699] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 600.731699] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] nwinfo = self.network_api.allocate_for_instance( [ 600.731699] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 600.731699] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] created_port_ids = self._update_ports_for_instance( [ 600.732139] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 600.732139] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] with excutils.save_and_reraise_exception(): [ 600.732139] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.732139] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] self.force_reraise() [ 600.732139] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.732139] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] raise self.value [ 600.732139] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 600.732139] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] updated_port = self._update_port( [ 600.732139] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.732139] 
env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] _ensure_no_port_binding_failure(port) [ 600.732139] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 600.732139] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] raise exception.PortBindingFailed(port_id=port['id']) [ 600.732594] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] nova.exception.PortBindingFailed: Binding failed for port 8c87bcce-c05b-48d6-8366-75225b049447, please check neutron logs for more information. [ 600.732594] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] [ 600.732594] env[62109]: INFO nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Terminating instance [ 600.734193] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "refresh_cache-49137502-b0a0-49f8-affa-82f19caf34b0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.734372] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquired lock "refresh_cache-49137502-b0a0-49f8-affa-82f19caf34b0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.734621] env[62109]: DEBUG nova.network.neutron [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 600.749633] env[62109]: DEBUG nova.compute.manager [req-8d8611f3-7058-451c-9b09-9e75112731b0 req-a56c4f13-faaa-41e4-9304-8d2ef72a07b4 service nova] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Received event network-changed-8c87bcce-c05b-48d6-8366-75225b049447 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 600.749817] env[62109]: DEBUG nova.compute.manager [req-8d8611f3-7058-451c-9b09-9e75112731b0 req-a56c4f13-faaa-41e4-9304-8d2ef72a07b4 service nova] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Refreshing instance network info cache due to event network-changed-8c87bcce-c05b-48d6-8366-75225b049447. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 600.749997] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d8611f3-7058-451c-9b09-9e75112731b0 req-a56c4f13-faaa-41e4-9304-8d2ef72a07b4 service nova] Acquiring lock "refresh_cache-49137502-b0a0-49f8-affa-82f19caf34b0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.872479] env[62109]: DEBUG nova.network.neutron [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.933613] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Releasing lock "refresh_cache-356e57cb-9e43-47e1-a02b-b81ff737883e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.934079] env[62109]: DEBUG nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 600.934298] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 600.934644] env[62109]: DEBUG oslo_concurrency.lockutils [req-f9f77d61-67e1-49e4-ad2e-d5e1709cc910 req-340860cb-1cd2-4a12-879e-794408b139f9 service nova] Acquired lock "refresh_cache-356e57cb-9e43-47e1-a02b-b81ff737883e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.934975] env[62109]: DEBUG nova.network.neutron [req-f9f77d61-67e1-49e4-ad2e-d5e1709cc910 req-340860cb-1cd2-4a12-879e-794408b139f9 service nova] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Refreshing network info cache for port 41fc812d-5c4e-409c-bfa2-2fa230b4beea {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 600.936192] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af76eb0d-4c7a-4ea6-bebe-76b1321e55fb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.951785] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86aaed12-1cf2-4888-b114-88706c247bba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.976828] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 356e57cb-9e43-47e1-a02b-b81ff737883e could not be found. 
[ 600.977069] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 600.979395] env[62109]: INFO nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 600.979395] env[62109]: DEBUG oslo.service.loopingcall [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 600.979395] env[62109]: DEBUG nova.compute.manager [-] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 600.979395] env[62109]: DEBUG nova.network.neutron [-] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 601.010422] env[62109]: DEBUG nova.network.neutron [-] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 601.259696] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "55381bef-dab5-44cd-97fe-9fc75ab61d0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.259941] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "55381bef-dab5-44cd-97fe-9fc75ab61d0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.262359] env[62109]: DEBUG nova.network.neutron [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 601.374925] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Releasing lock "refresh_cache-41eeecaa-8514-4550-989e-43db00dff6bb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.375416] env[62109]: DEBUG nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 601.375416] env[62109]: DEBUG nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 601.375596] env[62109]: DEBUG nova.network.neutron [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 601.404699] env[62109]: DEBUG nova.network.neutron [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 601.464869] env[62109]: DEBUG nova.network.neutron [req-f9f77d61-67e1-49e4-ad2e-d5e1709cc910 req-340860cb-1cd2-4a12-879e-794408b139f9 service nova] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 601.514620] env[62109]: DEBUG nova.network.neutron [-] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.602532] env[62109]: DEBUG nova.network.neutron [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.647229] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb60288-deb4-495c-b247-ba50d17a0ae5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.658582] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edca17a-8b2c-4912-99cf-f8dcf89c0f40 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.707023] env[62109]: DEBUG nova.network.neutron [req-f9f77d61-67e1-49e4-ad2e-d5e1709cc910 req-340860cb-1cd2-4a12-879e-794408b139f9 service nova] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.707023] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff8f7e7-17be-4436-837b-bc2c6c2f1dba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.722906] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b84630-e976-4e01-833e-242b0fb2a7be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.739630] env[62109]: DEBUG nova.compute.provider_tree [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.909964] env[62109]: DEBUG nova.network.neutron [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.019966] env[62109]: INFO nova.compute.manager [-] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Took 1.04 seconds to deallocate network for instance. 
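Every PortBindingFailed traceback in the run above bottoms out in the same guard, _ensure_no_port_binding_failure (nova/network/neutron.py:294). The raise itself is visible verbatim in the log; the condition that triggers it is not, so the snippet below is only a minimal, standalone sketch of that guard, assuming the usual Neutron convention that a failed binding is reported through the port's binding:vif_type field. The exception class and the sample port dict are illustrative stand-ins, not Nova's real objects.

    # Minimal sketch of the guard the tracebacks above keep hitting.
    # Assumption (not shown in the log): Neutron marks a failed binding with
    # binding:vif_type == 'binding_failed'.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'


    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)


    def _ensure_no_port_binding_failure(port):
        # The raise matches the log; the vif_type check is an assumption.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    # Example with a port shaped like the ones rejected above:
    failed_port = {'id': '8c87bcce-c05b-48d6-8366-75225b049447',
                   'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        _ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)  # same message format as the ERROR lines in the log
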
[ 602.023258] env[62109]: DEBUG nova.compute.claims [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 602.023455] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.107312] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Releasing lock "refresh_cache-49137502-b0a0-49f8-affa-82f19caf34b0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.110166] env[62109]: DEBUG nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 602.110166] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 602.110166] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d8611f3-7058-451c-9b09-9e75112731b0 req-a56c4f13-faaa-41e4-9304-8d2ef72a07b4 service nova] Acquired lock "refresh_cache-49137502-b0a0-49f8-affa-82f19caf34b0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.110166] env[62109]: DEBUG nova.network.neutron [req-8d8611f3-7058-451c-9b09-9e75112731b0 req-a56c4f13-faaa-41e4-9304-8d2ef72a07b4 service nova] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Refreshing network info cache for port 8c87bcce-c05b-48d6-8366-75225b049447 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 602.110166] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af6e8eef-7388-424d-a4cf-597d089e3e41 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.127573] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ae17f1-95f7-4748-be74-c673107e1c7a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.153348] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 49137502-b0a0-49f8-affa-82f19caf34b0 could not be found. 
[ 602.153620] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 602.153816] env[62109]: INFO nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Took 0.05 seconds to destroy the instance on the hypervisor. [ 602.154079] env[62109]: DEBUG oslo.service.loopingcall [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 602.154658] env[62109]: DEBUG nova.compute.manager [-] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 602.154777] env[62109]: DEBUG nova.network.neutron [-] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 602.183815] env[62109]: DEBUG nova.network.neutron [-] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.213348] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "a9fb75d5-e303-4f31-888d-528963ab23b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.213737] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "a9fb75d5-e303-4f31-888d-528963ab23b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.214762] env[62109]: DEBUG oslo_concurrency.lockutils [req-f9f77d61-67e1-49e4-ad2e-d5e1709cc910 req-340860cb-1cd2-4a12-879e-794408b139f9 service nova] Releasing lock "refresh_cache-356e57cb-9e43-47e1-a02b-b81ff737883e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.242770] env[62109]: DEBUG nova.scheduler.client.report [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 602.385945] env[62109]: DEBUG nova.compute.manager [req-60696302-97b0-4295-9dad-4524d7d372e5 req-086dc786-b19b-4e7f-b312-61e583727c9d service nova] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Received event network-vif-deleted-41fc812d-5c4e-409c-bfa2-2fa230b4beea {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 602.416028] env[62109]: INFO nova.compute.manager [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] [instance: 41eeecaa-8514-4550-989e-43db00dff6bb] Took 1.04 seconds to deallocate network for instance. [ 602.642101] env[62109]: DEBUG nova.network.neutron [req-8d8611f3-7058-451c-9b09-9e75112731b0 req-a56c4f13-faaa-41e4-9304-8d2ef72a07b4 service nova] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.687852] env[62109]: DEBUG nova.network.neutron [-] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.750852] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.675s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.751452] env[62109]: DEBUG nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 602.758521] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.756s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.775192] env[62109]: DEBUG nova.network.neutron [req-8d8611f3-7058-451c-9b09-9e75112731b0 req-a56c4f13-faaa-41e4-9304-8d2ef72a07b4 service nova] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.021021] env[62109]: DEBUG nova.compute.manager [req-03ecf7ab-88cb-4218-980f-8a0f66c19076 req-606a1d60-7a6e-422a-bbb9-434b9cf2c295 service nova] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Received event network-vif-deleted-8c87bcce-c05b-48d6-8366-75225b049447 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 603.193653] env[62109]: INFO nova.compute.manager [-] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Took 1.04 seconds to deallocate network for instance. [ 603.194663] env[62109]: DEBUG nova.compute.claims [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 603.194909] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.263582] env[62109]: DEBUG nova.compute.utils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.265600] env[62109]: DEBUG nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 603.266243] env[62109]: DEBUG nova.network.neutron [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 603.279958] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d8611f3-7058-451c-9b09-9e75112731b0 req-a56c4f13-faaa-41e4-9304-8d2ef72a07b4 service nova] Releasing lock "refresh_cache-49137502-b0a0-49f8-affa-82f19caf34b0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.365771] env[62109]: DEBUG nova.policy [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53d26e0515864175963777284ca6d342', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '88ac845ffffe44d9a1127254f08fce91', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 603.457888] env[62109]: INFO nova.scheduler.client.report [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Deleted allocations for instance 41eeecaa-8514-4550-989e-43db00dff6bb [ 603.741528] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "028300fd-f9f8-4606-a39e-53582f830eeb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.741794] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "028300fd-f9f8-4606-a39e-53582f830eeb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.776739] env[62109]: DEBUG nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 603.837846] env[62109]: DEBUG nova.network.neutron [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Successfully created port: 59c8d794-2ad6-4cce-900a-4e99a1b46eb6 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.897405] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f190f1-6e30-4af2-bb20-e87700128179 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.909746] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5b0bee-360b-4031-977e-87e9e0e6ecba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.945237] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4a28ff-81d7-48b0-962b-e8115926dd00 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.953764] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b147e10d-5aaa-41a4-968d-3ad1835356c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.969800] env[62109]: DEBUG nova.compute.provider_tree [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.974295] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a63ce895-9982-4d65-b709-7136740b3c3c tempest-VolumesAssistedSnapshotsTest-11929798 tempest-VolumesAssistedSnapshotsTest-11929798-project-member] Lock "41eeecaa-8514-4550-989e-43db00dff6bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.591s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.475646] env[62109]: DEBUG nova.scheduler.client.report [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 604.479915] env[62109]: DEBUG nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 604.679975] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Acquiring lock "1aaa9eae-9183-49d7-a452-4345ad2a9aa0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.680225] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Lock "1aaa9eae-9183-49d7-a452-4345ad2a9aa0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.787837] env[62109]: DEBUG nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 604.816366] env[62109]: ERROR nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 59c8d794-2ad6-4cce-900a-4e99a1b46eb6, please check neutron logs for more information. 
[ 604.816366] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 604.816366] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.816366] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 604.816366] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 604.816366] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 604.816366] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 604.816366] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 604.816366] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.816366] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 604.816366] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.816366] env[62109]: ERROR nova.compute.manager raise self.value [ 604.816366] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 604.816366] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 604.816366] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.816366] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 604.816943] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 604.816943] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 604.816943] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 59c8d794-2ad6-4cce-900a-4e99a1b46eb6, please check neutron logs for more information. 
[ 604.816943] env[62109]: ERROR nova.compute.manager [ 604.816943] env[62109]: Traceback (most recent call last): [ 604.816943] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 604.816943] env[62109]: listener.cb(fileno) [ 604.816943] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 604.816943] env[62109]: result = function(*args, **kwargs) [ 604.816943] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 604.816943] env[62109]: return func(*args, **kwargs) [ 604.816943] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 604.816943] env[62109]: raise e [ 604.816943] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.816943] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 604.816943] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 604.816943] env[62109]: created_port_ids = self._update_ports_for_instance( [ 604.816943] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 604.816943] env[62109]: with excutils.save_and_reraise_exception(): [ 604.816943] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.816943] env[62109]: self.force_reraise() [ 604.816943] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.816943] env[62109]: raise self.value [ 604.816943] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 604.816943] env[62109]: updated_port = self._update_port( [ 604.816943] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.816943] env[62109]: _ensure_no_port_binding_failure(port) [ 604.816943] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 604.816943] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 604.818037] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 59c8d794-2ad6-4cce-900a-4e99a1b46eb6, please check neutron logs for more information. 
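The two tracebacks above show the failure path that recurs through the rest of this log: nova-compute updates the port in Neutron, sees that the binding failed, raises PortBindingFailed, and re-raises it unchanged through oslo.utils' save_and_reraise_exception() after cleanup. The Python sketch below illustrates that pattern only; it is not Nova's actual code. PortBindingFailed and update_ports_for_instance() here are simplified stand-ins, and the check of 'binding:vif_type' against 'binding_failed' is an assumption about how Neutron marks a port whose binding could not be completed.

    # Illustrative sketch only -- not Nova's implementation.
    # Mirrors the pattern visible in the traceback above: update a port,
    # check whether its binding failed, and re-raise the original error
    # through oslo.utils after rolling back already-created ports.
    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        """Hypothetical stand-in for nova.exception.PortBindingFailed."""

        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)


    def ensure_no_port_binding_failure(port):
        # Assumption: Neutron reports a failed binding via
        # binding:vif_type == 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    def update_ports_for_instance(ports, update_port):
        """Update each port, cleaning up on failure before re-raising."""
        updated = []
        for port in ports:
            try:
                updated_port = update_port(port)
                ensure_no_port_binding_failure(updated_port)
                updated.append(updated_port['id'])
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Roll back whatever was already created; the original
                    # exception is re-raised when this context manager exits.
                    for port_id in updated:
                        print("would delete port %s" % port_id)
        return updated

On this path the compute manager then aborts the resource claim and reschedules the build, which is what the "Aborting claim" and "was re-scheduled" records elsewhere in this section correspond to.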
[ 604.818037] env[62109]: Removing descriptor: 16 [ 604.825893] env[62109]: DEBUG nova.virt.hardware [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 604.826125] env[62109]: DEBUG nova.virt.hardware [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 604.826296] env[62109]: DEBUG nova.virt.hardware [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 604.826485] env[62109]: DEBUG nova.virt.hardware [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 604.826630] env[62109]: DEBUG nova.virt.hardware [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 604.826780] env[62109]: DEBUG nova.virt.hardware [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 604.826984] env[62109]: DEBUG nova.virt.hardware [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 604.827161] env[62109]: DEBUG nova.virt.hardware [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 604.827328] env[62109]: DEBUG nova.virt.hardware [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 
tempest-MigrationsAdminTest-769684774-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 604.827492] env[62109]: DEBUG nova.virt.hardware [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 604.827663] env[62109]: DEBUG nova.virt.hardware [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 604.828688] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895ce8da-52aa-4be1-9aa5-edc5b737c0e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.837642] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53815f31-186b-4143-b279-c902ea1f3e8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.855524] env[62109]: ERROR nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 59c8d794-2ad6-4cce-900a-4e99a1b46eb6, please check neutron logs for more information. 
[ 604.855524] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Traceback (most recent call last): [ 604.855524] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 604.855524] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] yield resources [ 604.855524] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 604.855524] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] self.driver.spawn(context, instance, image_meta, [ 604.855524] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 604.855524] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 604.855524] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 604.855524] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] vm_ref = self.build_virtual_machine(instance, [ 604.855524] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 604.855917] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] vif_infos = vmwarevif.get_vif_info(self._session, [ 604.855917] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 604.855917] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] for vif in network_info: [ 604.855917] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 604.855917] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] return self._sync_wrapper(fn, *args, **kwargs) [ 604.855917] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 604.855917] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] self.wait() [ 604.855917] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 604.855917] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] self[:] = self._gt.wait() [ 604.855917] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 604.855917] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] return self._exit_event.wait() [ 604.855917] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 604.855917] env[62109]: ERROR 
nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] current.throw(*self._exc) [ 604.856396] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 604.856396] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] result = function(*args, **kwargs) [ 604.856396] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 604.856396] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] return func(*args, **kwargs) [ 604.856396] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 604.856396] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] raise e [ 604.856396] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.856396] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] nwinfo = self.network_api.allocate_for_instance( [ 604.856396] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 604.856396] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] created_port_ids = self._update_ports_for_instance( [ 604.856396] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 604.856396] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] with excutils.save_and_reraise_exception(): [ 604.856396] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.856815] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] self.force_reraise() [ 604.856815] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.856815] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] raise self.value [ 604.856815] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 604.856815] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] updated_port = self._update_port( [ 604.856815] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.856815] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] _ensure_no_port_binding_failure(port) [ 604.856815] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
604.856815] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] raise exception.PortBindingFailed(port_id=port['id']) [ 604.856815] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] nova.exception.PortBindingFailed: Binding failed for port 59c8d794-2ad6-4cce-900a-4e99a1b46eb6, please check neutron logs for more information. [ 604.856815] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] [ 604.856815] env[62109]: INFO nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Terminating instance [ 604.858835] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "refresh_cache-28e71e8c-2c47-4ea8-bd90-33eb064073e5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.859514] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquired lock "refresh_cache-28e71e8c-2c47-4ea8-bd90-33eb064073e5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.859514] env[62109]: DEBUG nova.network.neutron [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 604.989018] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.229s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.989018] env[62109]: ERROR nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 06c37800-4b43-44ae-b1b0-e1b65c1b680b, please check neutron logs for more information. 
[ 604.989018] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Traceback (most recent call last): [ 604.989018] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 604.989018] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] self.driver.spawn(context, instance, image_meta, [ 604.989018] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 604.989018] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 604.989018] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 604.989018] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] vm_ref = self.build_virtual_machine(instance, [ 604.989439] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 604.989439] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] vif_infos = vmwarevif.get_vif_info(self._session, [ 604.989439] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 604.989439] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] for vif in network_info: [ 604.989439] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 604.989439] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] return self._sync_wrapper(fn, *args, **kwargs) [ 604.989439] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 604.989439] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] self.wait() [ 604.989439] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 604.989439] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] self[:] = self._gt.wait() [ 604.989439] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 604.989439] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] return self._exit_event.wait() [ 604.989439] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 604.989844] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] result = hub.switch() [ 604.989844] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
604.989844] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] return self.greenlet.switch() [ 604.989844] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 604.989844] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] result = function(*args, **kwargs) [ 604.989844] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 604.989844] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] return func(*args, **kwargs) [ 604.989844] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 604.989844] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] raise e [ 604.989844] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.989844] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] nwinfo = self.network_api.allocate_for_instance( [ 604.989844] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 604.989844] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] created_port_ids = self._update_ports_for_instance( [ 604.990250] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 604.990250] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] with excutils.save_and_reraise_exception(): [ 604.990250] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.990250] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] self.force_reraise() [ 604.990250] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.990250] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] raise self.value [ 604.990250] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 604.990250] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] updated_port = self._update_port( [ 604.990250] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.990250] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] _ensure_no_port_binding_failure(port) [ 604.990250] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 604.990250] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] raise exception.PortBindingFailed(port_id=port['id']) [ 604.990611] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] nova.exception.PortBindingFailed: Binding failed for port 06c37800-4b43-44ae-b1b0-e1b65c1b680b, please check neutron logs for more information. [ 604.990611] env[62109]: ERROR nova.compute.manager [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] [ 604.990611] env[62109]: DEBUG nova.compute.utils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Binding failed for port 06c37800-4b43-44ae-b1b0-e1b65c1b680b, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 604.990611] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.021s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.992362] env[62109]: INFO nova.compute.claims [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.001088] env[62109]: DEBUG nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Build of instance 18d23737-e0e7-44c9-887e-6531acf496fa was re-scheduled: Binding failed for port 06c37800-4b43-44ae-b1b0-e1b65c1b680b, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 605.001550] env[62109]: DEBUG nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 605.001775] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Acquiring lock "refresh_cache-18d23737-e0e7-44c9-887e-6531acf496fa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.001920] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Acquired lock "refresh_cache-18d23737-e0e7-44c9-887e-6531acf496fa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.002103] env[62109]: DEBUG nova.network.neutron [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 605.031671] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.289620] env[62109]: DEBUG nova.compute.manager [req-43e82c4a-e587-46ed-925d-6ab7a56cdf21 req-9dff13a9-0361-4e84-8548-da071b7debfc service nova] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Received event network-changed-59c8d794-2ad6-4cce-900a-4e99a1b46eb6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 605.289796] env[62109]: DEBUG nova.compute.manager [req-43e82c4a-e587-46ed-925d-6ab7a56cdf21 req-9dff13a9-0361-4e84-8548-da071b7debfc service nova] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Refreshing instance network info cache due to event network-changed-59c8d794-2ad6-4cce-900a-4e99a1b46eb6. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 605.289997] env[62109]: DEBUG oslo_concurrency.lockutils [req-43e82c4a-e587-46ed-925d-6ab7a56cdf21 req-9dff13a9-0361-4e84-8548-da071b7debfc service nova] Acquiring lock "refresh_cache-28e71e8c-2c47-4ea8-bd90-33eb064073e5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.379150] env[62109]: DEBUG nova.network.neutron [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 605.445466] env[62109]: DEBUG nova.network.neutron [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.530485] env[62109]: DEBUG nova.network.neutron [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 605.620675] env[62109]: DEBUG nova.network.neutron [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.948016] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Releasing lock "refresh_cache-28e71e8c-2c47-4ea8-bd90-33eb064073e5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.948980] env[62109]: DEBUG nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 605.950277] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 605.950277] env[62109]: DEBUG oslo_concurrency.lockutils [req-43e82c4a-e587-46ed-925d-6ab7a56cdf21 req-9dff13a9-0361-4e84-8548-da071b7debfc service nova] Acquired lock "refresh_cache-28e71e8c-2c47-4ea8-bd90-33eb064073e5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.950277] env[62109]: DEBUG nova.network.neutron [req-43e82c4a-e587-46ed-925d-6ab7a56cdf21 req-9dff13a9-0361-4e84-8548-da071b7debfc service nova] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Refreshing network info cache for port 59c8d794-2ad6-4cce-900a-4e99a1b46eb6 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 605.951333] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-526a19ff-886a-4357-9dc1-739695361658 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.962207] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1036f2ba-c251-4484-ad0f-071d3f066560 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.993825] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 28e71e8c-2c47-4ea8-bd90-33eb064073e5 could not be found. [ 605.993825] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 605.993825] env[62109]: INFO nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 605.993825] env[62109]: DEBUG oslo.service.loopingcall [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.993825] env[62109]: DEBUG nova.compute.manager [-] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 605.993825] env[62109]: DEBUG nova.network.neutron [-] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 606.009873] env[62109]: DEBUG nova.network.neutron [-] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 606.130064] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Releasing lock "refresh_cache-18d23737-e0e7-44c9-887e-6531acf496fa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.130398] env[62109]: DEBUG nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 606.130871] env[62109]: DEBUG nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 606.131082] env[62109]: DEBUG nova.network.neutron [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 606.150034] env[62109]: DEBUG nova.network.neutron [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 606.480438] env[62109]: DEBUG nova.network.neutron [req-43e82c4a-e587-46ed-925d-6ab7a56cdf21 req-9dff13a9-0361-4e84-8548-da071b7debfc service nova] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 606.503755] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Acquiring lock "8584eb2c-57a3-455e-9d3c-877286e23ccc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.503755] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Lock "8584eb2c-57a3-455e-9d3c-877286e23ccc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.514258] env[62109]: DEBUG nova.network.neutron [-] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.574983] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51277405-a94b-4d51-9eef-7c9fc2df4dde {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.583074] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a27530-fd33-4582-9294-effbe84a21ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.588072] env[62109]: DEBUG nova.network.neutron [req-43e82c4a-e587-46ed-925d-6ab7a56cdf21 req-9dff13a9-0361-4e84-8548-da071b7debfc service nova] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.617860] env[62109]: DEBUG oslo_concurrency.lockutils [req-43e82c4a-e587-46ed-925d-6ab7a56cdf21 req-9dff13a9-0361-4e84-8548-da071b7debfc service nova] Releasing lock "refresh_cache-28e71e8c-2c47-4ea8-bd90-33eb064073e5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.617860] env[62109]: DEBUG nova.compute.manager [req-43e82c4a-e587-46ed-925d-6ab7a56cdf21 req-9dff13a9-0361-4e84-8548-da071b7debfc service nova] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Received event network-vif-deleted-59c8d794-2ad6-4cce-900a-4e99a1b46eb6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 606.618784] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab475280-9183-4c81-ab02-687bbe65b295 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.626536] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce256879-0b04-45e8-976d-bac95bbc9248 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.642145] env[62109]: DEBUG nova.compute.provider_tree [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d 
tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.658590] env[62109]: DEBUG nova.network.neutron [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.022014] env[62109]: INFO nova.compute.manager [-] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Took 1.03 seconds to deallocate network for instance. [ 607.023868] env[62109]: DEBUG nova.compute.claims [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 607.024252] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.147298] env[62109]: DEBUG nova.scheduler.client.report [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 607.161767] env[62109]: INFO nova.compute.manager [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] [instance: 18d23737-e0e7-44c9-887e-6531acf496fa] Took 1.03 seconds to deallocate network for instance. [ 607.653346] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.662s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.653506] env[62109]: DEBUG nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 607.656111] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.748s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.657997] env[62109]: INFO nova.compute.claims [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 608.163155] env[62109]: DEBUG nova.compute.utils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 608.168322] env[62109]: DEBUG nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 608.168576] env[62109]: DEBUG nova.network.neutron [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 608.210233] env[62109]: INFO nova.scheduler.client.report [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Deleted allocations for instance 18d23737-e0e7-44c9-887e-6531acf496fa [ 608.324297] env[62109]: DEBUG nova.policy [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '819eb84ac9a74ce8af1b0e70aa345432', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bba5ec42ee6f4aefadbac4ab4e0c379b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 608.656504] env[62109]: DEBUG nova.network.neutron [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Successfully created port: a6986e60-16bd-476e-b76c-ace1af4b54c8 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 608.669567] env[62109]: DEBUG nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 
tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 608.722109] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f9efeb2c-30d2-4d96-bdd7-4a9cadbceaaa tempest-InstanceActionsTestJSON-1226643893 tempest-InstanceActionsTestJSON-1226643893-project-member] Lock "18d23737-e0e7-44c9-887e-6531acf496fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.499s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.225356] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b3982f-570e-4e0f-b4f7-c5bdcf4c5844 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.230333] env[62109]: DEBUG nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 609.236523] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a000daad-04f3-4314-b5bf-8c0ec8ccbc48 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.273559] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c88e82-2ed6-4568-a883-1da9798d1a5a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.282017] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f65ec2ab-a18f-499e-9aaf-02d4c0c456f0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.297446] env[62109]: DEBUG nova.compute.provider_tree [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.687651] env[62109]: DEBUG nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 609.720106] env[62109]: DEBUG nova.virt.hardware [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 609.720106] env[62109]: DEBUG nova.virt.hardware [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 609.720106] env[62109]: DEBUG nova.virt.hardware [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 609.720382] env[62109]: DEBUG nova.virt.hardware [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 609.720382] env[62109]: DEBUG nova.virt.hardware [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 609.720382] env[62109]: DEBUG nova.virt.hardware [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 609.720924] env[62109]: DEBUG nova.virt.hardware [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 609.721302] env[62109]: DEBUG nova.virt.hardware [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 609.721658] env[62109]: DEBUG nova.virt.hardware [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 609.721970] env[62109]: DEBUG nova.virt.hardware [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 609.722569] env[62109]: DEBUG nova.virt.hardware [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 609.724017] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08638061-79b9-48ae-8bee-47ea5134283f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.737902] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e69f6d-8037-49a0-9944-c7c10fa356d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.761130] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.802079] env[62109]: DEBUG nova.scheduler.client.report [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 610.245234] env[62109]: DEBUG nova.compute.manager [req-3b985559-36e2-4e26-8427-20c76dcf4fb8 req-13e3a4e4-987b-4920-bdbf-7817745e9133 service nova] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Received event network-changed-a6986e60-16bd-476e-b76c-ace1af4b54c8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 610.245534] env[62109]: DEBUG nova.compute.manager [req-3b985559-36e2-4e26-8427-20c76dcf4fb8 req-13e3a4e4-987b-4920-bdbf-7817745e9133 service nova] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Refreshing instance network info cache due to event network-changed-a6986e60-16bd-476e-b76c-ace1af4b54c8. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 610.245776] env[62109]: DEBUG oslo_concurrency.lockutils [req-3b985559-36e2-4e26-8427-20c76dcf4fb8 req-13e3a4e4-987b-4920-bdbf-7817745e9133 service nova] Acquiring lock "refresh_cache-58f76ca2-8f1b-4d9f-887b-1527ba70e91c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.245776] env[62109]: DEBUG oslo_concurrency.lockutils [req-3b985559-36e2-4e26-8427-20c76dcf4fb8 req-13e3a4e4-987b-4920-bdbf-7817745e9133 service nova] Acquired lock "refresh_cache-58f76ca2-8f1b-4d9f-887b-1527ba70e91c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.246102] env[62109]: DEBUG nova.network.neutron [req-3b985559-36e2-4e26-8427-20c76dcf4fb8 req-13e3a4e4-987b-4920-bdbf-7817745e9133 service nova] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Refreshing network info cache for port a6986e60-16bd-476e-b76c-ace1af4b54c8 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 610.305574] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.649s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.306114] env[62109]: DEBUG nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 610.308740] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.032s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.308974] env[62109]: DEBUG nova.objects.instance [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Lazy-loading 'resources' on Instance uuid 13988400-7b35-4175-9410-84eff918111d {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 610.816781] env[62109]: DEBUG nova.compute.utils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 610.823396] env[62109]: DEBUG nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 610.823396] env[62109]: DEBUG nova.network.neutron [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 610.828219] env[62109]: ERROR nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a6986e60-16bd-476e-b76c-ace1af4b54c8, please check neutron logs for more information. [ 610.828219] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 610.828219] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 610.828219] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 610.828219] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 610.828219] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 610.828219] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 610.828219] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 610.828219] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 610.828219] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 610.828219] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 610.828219] env[62109]: ERROR nova.compute.manager raise self.value [ 610.828219] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 610.828219] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 610.828219] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 610.828219] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 610.828812] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 610.828812] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 610.828812] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a6986e60-16bd-476e-b76c-ace1af4b54c8, please check neutron logs for more information. 
[ 610.828812] env[62109]: ERROR nova.compute.manager [ 610.828812] env[62109]: Traceback (most recent call last): [ 610.828812] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 610.828812] env[62109]: listener.cb(fileno) [ 610.828812] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 610.828812] env[62109]: result = function(*args, **kwargs) [ 610.828812] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 610.828812] env[62109]: return func(*args, **kwargs) [ 610.828812] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 610.828812] env[62109]: raise e [ 610.828812] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 610.828812] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 610.828812] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 610.828812] env[62109]: created_port_ids = self._update_ports_for_instance( [ 610.828812] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 610.828812] env[62109]: with excutils.save_and_reraise_exception(): [ 610.828812] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 610.828812] env[62109]: self.force_reraise() [ 610.828812] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 610.828812] env[62109]: raise self.value [ 610.828812] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 610.828812] env[62109]: updated_port = self._update_port( [ 610.828812] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 610.828812] env[62109]: _ensure_no_port_binding_failure(port) [ 610.828812] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 610.828812] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 610.830123] env[62109]: nova.exception.PortBindingFailed: Binding failed for port a6986e60-16bd-476e-b76c-ace1af4b54c8, please check neutron logs for more information. [ 610.830123] env[62109]: Removing descriptor: 16 [ 610.830123] env[62109]: ERROR nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a6986e60-16bd-476e-b76c-ace1af4b54c8, please check neutron logs for more information. 
[ 610.830123] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Traceback (most recent call last): [ 610.830123] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 610.830123] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] yield resources [ 610.830123] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 610.830123] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] self.driver.spawn(context, instance, image_meta, [ 610.830123] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 610.830123] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 610.830123] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 610.830123] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] vm_ref = self.build_virtual_machine(instance, [ 610.830775] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 610.830775] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] vif_infos = vmwarevif.get_vif_info(self._session, [ 610.830775] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 610.830775] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] for vif in network_info: [ 610.830775] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 610.830775] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] return self._sync_wrapper(fn, *args, **kwargs) [ 610.830775] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 610.830775] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] self.wait() [ 610.830775] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 610.830775] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] self[:] = self._gt.wait() [ 610.830775] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 610.830775] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] return self._exit_event.wait() [ 610.830775] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 610.831292] env[62109]: ERROR 
nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] result = hub.switch() [ 610.831292] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 610.831292] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] return self.greenlet.switch() [ 610.831292] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 610.831292] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] result = function(*args, **kwargs) [ 610.831292] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 610.831292] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] return func(*args, **kwargs) [ 610.831292] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 610.831292] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] raise e [ 610.831292] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 610.831292] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] nwinfo = self.network_api.allocate_for_instance( [ 610.831292] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 610.831292] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] created_port_ids = self._update_ports_for_instance( [ 610.833169] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 610.833169] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] with excutils.save_and_reraise_exception(): [ 610.833169] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 610.833169] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] self.force_reraise() [ 610.833169] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 610.833169] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] raise self.value [ 610.833169] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 610.833169] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] updated_port = self._update_port( [ 610.833169] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 610.833169] 
env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] _ensure_no_port_binding_failure(port) [ 610.833169] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 610.833169] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] raise exception.PortBindingFailed(port_id=port['id']) [ 610.833804] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] nova.exception.PortBindingFailed: Binding failed for port a6986e60-16bd-476e-b76c-ace1af4b54c8, please check neutron logs for more information. [ 610.833804] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] [ 610.833804] env[62109]: INFO nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Terminating instance [ 610.833804] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Acquiring lock "refresh_cache-58f76ca2-8f1b-4d9f-887b-1527ba70e91c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.840273] env[62109]: DEBUG nova.network.neutron [req-3b985559-36e2-4e26-8427-20c76dcf4fb8 req-13e3a4e4-987b-4920-bdbf-7817745e9133 service nova] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 610.895890] env[62109]: DEBUG nova.policy [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c56b67b9d7b84ed993773ae51096a5bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11044f23cd8243e7bbc9f29d6685bfcd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 611.088607] env[62109]: DEBUG nova.network.neutron [req-3b985559-36e2-4e26-8427-20c76dcf4fb8 req-13e3a4e4-987b-4920-bdbf-7817745e9133 service nova] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.327042] env[62109]: DEBUG nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 611.385037] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0110aef2-128d-414d-bb4a-b7f39a74cc59 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.393177] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a49ffb3-bbb9-46ca-a9e5-5ee2a595c2ee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.425291] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a1505d-ea0a-41ee-a963-52b0a118a683 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.431109] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4223be71-1c95-4b5b-bdfe-243800673477 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.446207] env[62109]: DEBUG nova.compute.provider_tree [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.460267] env[62109]: DEBUG nova.network.neutron [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Successfully created port: 5fee0ba0-a98f-41d2-a74f-a40061ef22ab {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 611.587886] env[62109]: DEBUG oslo_concurrency.lockutils [req-3b985559-36e2-4e26-8427-20c76dcf4fb8 req-13e3a4e4-987b-4920-bdbf-7817745e9133 service nova] Releasing lock "refresh_cache-58f76ca2-8f1b-4d9f-887b-1527ba70e91c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.588340] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Acquired lock "refresh_cache-58f76ca2-8f1b-4d9f-887b-1527ba70e91c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.588575] env[62109]: DEBUG nova.network.neutron [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 611.949998] env[62109]: DEBUG nova.scheduler.client.report [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 612.130710] env[62109]: DEBUG nova.network.neutron [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 612.342388] env[62109]: DEBUG nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 612.364241] env[62109]: DEBUG nova.network.neutron [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.380918] env[62109]: DEBUG nova.virt.hardware [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 612.381240] env[62109]: DEBUG nova.virt.hardware [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 612.381421] env[62109]: DEBUG nova.virt.hardware [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.381605] env[62109]: DEBUG nova.virt.hardware [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 612.381928] env[62109]: 
DEBUG nova.virt.hardware [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.382161] env[62109]: DEBUG nova.virt.hardware [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 612.382380] env[62109]: DEBUG nova.virt.hardware [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 612.382539] env[62109]: DEBUG nova.virt.hardware [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 612.382698] env[62109]: DEBUG nova.virt.hardware [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 612.382856] env[62109]: DEBUG nova.virt.hardware [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 612.383037] env[62109]: DEBUG nova.virt.hardware [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 612.384046] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8410ee71-a5a2-400a-9e06-17b3845a5e54 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.394811] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8e9711-b675-4b1f-a365-8df66bf42386 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.456478] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.148s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.458898] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.642s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.460931] env[62109]: INFO nova.compute.claims [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 612.519307] env[62109]: INFO nova.scheduler.client.report [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Deleted allocations for instance 13988400-7b35-4175-9410-84eff918111d [ 612.579053] env[62109]: DEBUG nova.compute.manager [req-f1c3c8b9-118d-42f2-a9a8-d36c158216b6 req-d093c65f-9b4e-43df-9e65-1134c1a64f5d service nova] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Received event network-vif-deleted-a6986e60-16bd-476e-b76c-ace1af4b54c8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 612.870730] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Releasing lock "refresh_cache-58f76ca2-8f1b-4d9f-887b-1527ba70e91c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.871177] env[62109]: DEBUG nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 612.871363] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 612.871756] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5cb492b-4088-462b-91d4-a462970d0440 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.882090] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5570936-11df-4047-af09-f5c5a342c62b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.909718] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 58f76ca2-8f1b-4d9f-887b-1527ba70e91c could not be found. 
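The "Claim successful on node domain-c8.fc996f14-…" and "Inventory has not changed for provider 574e9717-…" records above carry the provider's inventory payload verbatim. As a quick illustrative check (not part of the log), Placement derives usable capacity per resource class as (total - reserved) * allocation_ratio, with max_unit capping any single allocation; a short sketch using the exact payload from the records above:

```python
# Illustrative only -- not part of the Nova/Placement code base.
# The inventory dict is copied verbatim from the "Inventory has not changed for
# provider 574e9717-c25e-453d-8028-45d9e2f95398" records in this log. Placement
# treats (total - reserved) * allocation_ratio as usable capacity, and max_unit
# as the ceiling for any single allocation.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 171,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:g}, single allocation capped at {inv['max_unit']}")
# VCPU: capacity=192, MEMORY_MB: capacity=196078, DISK_GB: capacity=400
```

With a 4.0 VCPU allocation ratio, the 48 physical cores advertise 192 schedulable VCPUs, which is why the m1.nano claims in this log succeed even while several instances build concurrently.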
[ 612.910236] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 612.910433] env[62109]: INFO nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 612.910687] env[62109]: DEBUG oslo.service.loopingcall [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 612.910920] env[62109]: DEBUG nova.compute.manager [-] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 612.911027] env[62109]: DEBUG nova.network.neutron [-] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 612.944920] env[62109]: DEBUG nova.network.neutron [-] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.034021] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1d0cccd8-6df3-4e4d-918f-49127175cdf9 tempest-ServerDiagnosticsV248Test-1213643326 tempest-ServerDiagnosticsV248Test-1213643326-project-member] Lock "13988400-7b35-4175-9410-84eff918111d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.543s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.095985] env[62109]: ERROR nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5fee0ba0-a98f-41d2-a74f-a40061ef22ab, please check neutron logs for more information. 
[ 613.095985] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 613.095985] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.095985] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 613.095985] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 613.095985] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 613.095985] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 613.095985] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 613.095985] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.095985] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 613.095985] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.095985] env[62109]: ERROR nova.compute.manager raise self.value [ 613.095985] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 613.095985] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 613.095985] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.095985] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 613.098568] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.098568] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 613.098568] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5fee0ba0-a98f-41d2-a74f-a40061ef22ab, please check neutron logs for more information. 
[ 613.098568] env[62109]: ERROR nova.compute.manager [ 613.098568] env[62109]: Traceback (most recent call last): [ 613.098568] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 613.098568] env[62109]: listener.cb(fileno) [ 613.098568] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.098568] env[62109]: result = function(*args, **kwargs) [ 613.098568] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 613.098568] env[62109]: return func(*args, **kwargs) [ 613.098568] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 613.098568] env[62109]: raise e [ 613.098568] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.098568] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 613.098568] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 613.098568] env[62109]: created_port_ids = self._update_ports_for_instance( [ 613.098568] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 613.098568] env[62109]: with excutils.save_and_reraise_exception(): [ 613.098568] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.098568] env[62109]: self.force_reraise() [ 613.098568] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.098568] env[62109]: raise self.value [ 613.098568] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 613.098568] env[62109]: updated_port = self._update_port( [ 613.098568] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.098568] env[62109]: _ensure_no_port_binding_failure(port) [ 613.098568] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.098568] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 613.099551] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 5fee0ba0-a98f-41d2-a74f-a40061ef22ab, please check neutron logs for more information. [ 613.099551] env[62109]: Removing descriptor: 18 [ 613.099551] env[62109]: ERROR nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5fee0ba0-a98f-41d2-a74f-a40061ef22ab, please check neutron logs for more information. 
[ 613.099551] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Traceback (most recent call last): [ 613.099551] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 613.099551] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] yield resources [ 613.099551] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 613.099551] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] self.driver.spawn(context, instance, image_meta, [ 613.099551] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 613.099551] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] self._vmops.spawn(context, instance, image_meta, injected_files, [ 613.099551] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 613.099551] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] vm_ref = self.build_virtual_machine(instance, [ 613.099936] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 613.099936] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] vif_infos = vmwarevif.get_vif_info(self._session, [ 613.099936] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 613.099936] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] for vif in network_info: [ 613.099936] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 613.099936] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] return self._sync_wrapper(fn, *args, **kwargs) [ 613.099936] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 613.099936] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] self.wait() [ 613.099936] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 613.099936] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] self[:] = self._gt.wait() [ 613.099936] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 613.099936] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] return self._exit_event.wait() [ 613.099936] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 613.100398] env[62109]: ERROR 
nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] result = hub.switch() [ 613.100398] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 613.100398] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] return self.greenlet.switch() [ 613.100398] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.100398] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] result = function(*args, **kwargs) [ 613.100398] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 613.100398] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] return func(*args, **kwargs) [ 613.100398] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 613.100398] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] raise e [ 613.100398] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.100398] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] nwinfo = self.network_api.allocate_for_instance( [ 613.100398] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 613.100398] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] created_port_ids = self._update_ports_for_instance( [ 613.100871] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 613.100871] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] with excutils.save_and_reraise_exception(): [ 613.100871] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.100871] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] self.force_reraise() [ 613.100871] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.100871] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] raise self.value [ 613.100871] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 613.100871] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] updated_port = self._update_port( [ 613.100871] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.100871] 
env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] _ensure_no_port_binding_failure(port) [ 613.100871] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.100871] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] raise exception.PortBindingFailed(port_id=port['id']) [ 613.102108] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] nova.exception.PortBindingFailed: Binding failed for port 5fee0ba0-a98f-41d2-a74f-a40061ef22ab, please check neutron logs for more information. [ 613.102108] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] [ 613.102108] env[62109]: INFO nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Terminating instance [ 613.102108] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Acquiring lock "refresh_cache-21efb09c-8d90-415c-815a-af6ce6707c97" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.102108] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Acquired lock "refresh_cache-21efb09c-8d90-415c-815a-af6ce6707c97" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.102108] env[62109]: DEBUG nova.network.neutron [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 613.447231] env[62109]: DEBUG nova.network.neutron [-] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.626815] env[62109]: DEBUG nova.network.neutron [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.748400] env[62109]: DEBUG nova.network.neutron [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.953294] env[62109]: INFO nova.compute.manager [-] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Took 1.04 seconds to deallocate network for instance. 
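Both build failures in this log (ports a6986e60-16bd-476e-b76c-ace1af4b54c8 and 5fee0ba0-a98f-41d2-a74f-a40061ef22ab) terminate in the same check that the tracebacks name: _update_port calls _ensure_no_port_binding_failure, which raises PortBindingFailed when Neutron reports the binding as failed. A minimal, self-contained sketch of that pattern (an illustration, not Nova's code verbatim; the 'binding:vif_type' key and 'binding_failed' value are how Neutron marks a failed port binding):

```python
# Standalone illustration of the failure path named in the tracebacks above
# (_update_port -> _ensure_no_port_binding_failure -> PortBindingFailed).
# This is a sketch, not Nova's code verbatim.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if Neutron handed back a port whose binding failed."""
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# A port that bound correctly passes silently; one that did not raises,
# which is what aborts the spawn and triggers the cleanup seen in this log.
ensure_no_port_binding_failure(
    {'id': 'a6986e60-16bd-476e-b76c-ace1af4b54c8', 'binding:vif_type': 'ovs'})
try:
    ensure_no_port_binding_failure(
        {'id': '5fee0ba0-a98f-41d2-a74f-a40061ef22ab',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)
```

Once the exception escapes _allocate_network_async, the records around this point show the usual cleanup: the instance is terminated, the hypervisor destroy tolerates InstanceNotFound because nothing was ever created in vCenter, the empty network info is deallocated, and the resource claim is aborted so the node's inventory is released.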
[ 613.959074] env[62109]: DEBUG nova.compute.claims [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 613.959273] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.021046] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e55b5c9-544b-4f15-bfb4-db6a08eb1e3f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.035505] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794a9032-6761-4eb8-92c2-50b7db02eacd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.066216] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb894d68-cf7a-46fd-a88f-274d9d5b0111 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.073658] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cc498e-ff6f-4e47-9303-8fa3d9a13289 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.087275] env[62109]: DEBUG nova.compute.provider_tree [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.250151] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Releasing lock "refresh_cache-21efb09c-8d90-415c-815a-af6ce6707c97" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.250582] env[62109]: DEBUG nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 614.250779] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 614.251141] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4b23e88-e41f-4f5a-8fd8-f9cb472fac61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.265886] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d096bb-d4ac-4d73-b225-5075dc94418b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.303587] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 21efb09c-8d90-415c-815a-af6ce6707c97 could not be found. [ 614.303587] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 614.303587] env[62109]: INFO nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Took 0.05 seconds to destroy the instance on the hypervisor. [ 614.303587] env[62109]: DEBUG oslo.service.loopingcall [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.303587] env[62109]: DEBUG nova.compute.manager [-] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 614.303587] env[62109]: DEBUG nova.network.neutron [-] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 614.333357] env[62109]: DEBUG nova.network.neutron [-] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 614.593235] env[62109]: DEBUG nova.scheduler.client.report [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 614.774352] env[62109]: DEBUG nova.compute.manager [req-c4070046-3277-413a-8201-83f57159c7de req-6c27d221-0ed1-4a34-956d-a5bbfd939b97 service nova] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Received event network-changed-5fee0ba0-a98f-41d2-a74f-a40061ef22ab {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 614.774352] env[62109]: DEBUG nova.compute.manager [req-c4070046-3277-413a-8201-83f57159c7de req-6c27d221-0ed1-4a34-956d-a5bbfd939b97 service nova] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Refreshing instance network info cache due to event network-changed-5fee0ba0-a98f-41d2-a74f-a40061ef22ab. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 614.774352] env[62109]: DEBUG oslo_concurrency.lockutils [req-c4070046-3277-413a-8201-83f57159c7de req-6c27d221-0ed1-4a34-956d-a5bbfd939b97 service nova] Acquiring lock "refresh_cache-21efb09c-8d90-415c-815a-af6ce6707c97" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.774352] env[62109]: DEBUG oslo_concurrency.lockutils [req-c4070046-3277-413a-8201-83f57159c7de req-6c27d221-0ed1-4a34-956d-a5bbfd939b97 service nova] Acquired lock "refresh_cache-21efb09c-8d90-415c-815a-af6ce6707c97" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.774352] env[62109]: DEBUG nova.network.neutron [req-c4070046-3277-413a-8201-83f57159c7de req-6c27d221-0ed1-4a34-956d-a5bbfd939b97 service nova] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Refreshing network info cache for port 5fee0ba0-a98f-41d2-a74f-a40061ef22ab {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 614.835366] env[62109]: DEBUG nova.network.neutron [-] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.100306] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.100871] env[62109]: DEBUG nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Start 
building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 615.106355] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.862s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.299146] env[62109]: DEBUG nova.network.neutron [req-c4070046-3277-413a-8201-83f57159c7de req-6c27d221-0ed1-4a34-956d-a5bbfd939b97 service nova] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 615.338709] env[62109]: INFO nova.compute.manager [-] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Took 1.04 seconds to deallocate network for instance. [ 615.341767] env[62109]: DEBUG nova.compute.claims [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 615.341767] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.403405] env[62109]: DEBUG nova.network.neutron [req-c4070046-3277-413a-8201-83f57159c7de req-6c27d221-0ed1-4a34-956d-a5bbfd939b97 service nova] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.614448] env[62109]: DEBUG nova.compute.utils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 615.620326] env[62109]: DEBUG nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 615.620613] env[62109]: DEBUG nova.network.neutron [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 615.673498] env[62109]: DEBUG nova.policy [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b8ac755b9de4977a964f3b6cfe09416', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '885390a7b4a94a9381d906709ff2d585', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 615.907416] env[62109]: DEBUG oslo_concurrency.lockutils [req-c4070046-3277-413a-8201-83f57159c7de req-6c27d221-0ed1-4a34-956d-a5bbfd939b97 service nova] Releasing lock "refresh_cache-21efb09c-8d90-415c-815a-af6ce6707c97" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.907714] env[62109]: DEBUG nova.compute.manager [req-c4070046-3277-413a-8201-83f57159c7de req-6c27d221-0ed1-4a34-956d-a5bbfd939b97 service nova] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Received event network-vif-deleted-5fee0ba0-a98f-41d2-a74f-a40061ef22ab {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 615.964987] env[62109]: DEBUG nova.network.neutron [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Successfully created port: 022476a0-892b-4e81-afca-c01cc29d6f07 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 616.099957] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8c39ba-1427-4a12-b656-de14ce2fff65 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.108711] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e3efdc-ab52-4f1b-94b1-b802337557ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.142834] env[62109]: DEBUG nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 616.146183] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0cf7c22-6638-4585-837f-b3aee268ff28 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.155210] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26d025d-d4a2-451b-8e36-27d416c5c249 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.172799] env[62109]: DEBUG nova.compute.provider_tree [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.365051] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Acquiring lock "8b6ec904-8c68-4eaa-94fe-47a87528e26b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.365575] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Lock "8b6ec904-8c68-4eaa-94fe-47a87528e26b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.677554] env[62109]: DEBUG nova.scheduler.client.report [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 616.802849] env[62109]: DEBUG nova.compute.manager [req-a2458d5a-6ff2-440c-9045-af8e8fb73c1c req-fcc74d01-7664-4837-800f-ba4cc610f02c service nova] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Received event network-changed-022476a0-892b-4e81-afca-c01cc29d6f07 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 616.803063] env[62109]: DEBUG nova.compute.manager [req-a2458d5a-6ff2-440c-9045-af8e8fb73c1c req-fcc74d01-7664-4837-800f-ba4cc610f02c service nova] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Refreshing instance network info cache due to event network-changed-022476a0-892b-4e81-afca-c01cc29d6f07. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 616.803275] env[62109]: DEBUG oslo_concurrency.lockutils [req-a2458d5a-6ff2-440c-9045-af8e8fb73c1c req-fcc74d01-7664-4837-800f-ba4cc610f02c service nova] Acquiring lock "refresh_cache-a6d094c3-8488-4437-8972-aa246809a5b1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.803447] env[62109]: DEBUG oslo_concurrency.lockutils [req-a2458d5a-6ff2-440c-9045-af8e8fb73c1c req-fcc74d01-7664-4837-800f-ba4cc610f02c service nova] Acquired lock "refresh_cache-a6d094c3-8488-4437-8972-aa246809a5b1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.803621] env[62109]: DEBUG nova.network.neutron [req-a2458d5a-6ff2-440c-9045-af8e8fb73c1c req-fcc74d01-7664-4837-800f-ba4cc610f02c service nova] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Refreshing network info cache for port 022476a0-892b-4e81-afca-c01cc29d6f07 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 616.987176] env[62109]: ERROR nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 022476a0-892b-4e81-afca-c01cc29d6f07, please check neutron logs for more information. [ 616.987176] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 616.987176] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.987176] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 616.987176] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 616.987176] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 616.987176] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 616.987176] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 616.987176] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.987176] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 616.987176] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.987176] env[62109]: ERROR nova.compute.manager raise self.value [ 616.987176] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 616.987176] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 616.987176] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.987176] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 616.987989] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.987989] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 616.987989] env[62109]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 022476a0-892b-4e81-afca-c01cc29d6f07, please check neutron logs for more information. [ 616.987989] env[62109]: ERROR nova.compute.manager [ 616.987989] env[62109]: Traceback (most recent call last): [ 616.987989] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 616.987989] env[62109]: listener.cb(fileno) [ 616.987989] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 616.987989] env[62109]: result = function(*args, **kwargs) [ 616.987989] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 616.987989] env[62109]: return func(*args, **kwargs) [ 616.987989] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 616.987989] env[62109]: raise e [ 616.987989] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.987989] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 616.987989] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 616.987989] env[62109]: created_port_ids = self._update_ports_for_instance( [ 616.987989] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 616.987989] env[62109]: with excutils.save_and_reraise_exception(): [ 616.987989] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.987989] env[62109]: self.force_reraise() [ 616.987989] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.987989] env[62109]: raise self.value [ 616.987989] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 616.987989] env[62109]: updated_port = self._update_port( [ 616.987989] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.987989] env[62109]: _ensure_no_port_binding_failure(port) [ 616.987989] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.987989] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 616.989251] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 022476a0-892b-4e81-afca-c01cc29d6f07, please check neutron logs for more information. [ 616.989251] env[62109]: Removing descriptor: 18 [ 617.154923] env[62109]: DEBUG nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 617.185189] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.081s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.186521] env[62109]: ERROR nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6355869c-d5f0-4430-b8df-a229b415cb3c, please check neutron logs for more information. [ 617.186521] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Traceback (most recent call last): [ 617.186521] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 617.186521] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] self.driver.spawn(context, instance, image_meta, [ 617.186521] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 617.186521] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 617.186521] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 617.186521] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] vm_ref = self.build_virtual_machine(instance, [ 617.186521] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 617.186521] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] vif_infos = vmwarevif.get_vif_info(self._session, [ 617.186521] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 617.186922] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] for vif in network_info: [ 617.186922] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 617.186922] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] return self._sync_wrapper(fn, *args, **kwargs) [ 617.186922] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 617.186922] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] self.wait() [ 617.186922] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 
617.186922] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] self[:] = self._gt.wait() [ 617.186922] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 617.186922] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] return self._exit_event.wait() [ 617.186922] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 617.186922] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] result = hub.switch() [ 617.186922] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 617.186922] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] return self.greenlet.switch() [ 617.187326] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 617.187326] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] result = function(*args, **kwargs) [ 617.187326] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 617.187326] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] return func(*args, **kwargs) [ 617.187326] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 617.187326] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] raise e [ 617.187326] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 617.187326] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] nwinfo = self.network_api.allocate_for_instance( [ 617.187326] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 617.187326] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] created_port_ids = self._update_ports_for_instance( [ 617.187326] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 617.187326] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] with excutils.save_and_reraise_exception(): [ 617.187326] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.187724] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] self.force_reraise() [ 617.187724] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.187724] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] raise self.value [ 617.187724] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 617.187724] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] updated_port = self._update_port( [ 617.187724] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.187724] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] _ensure_no_port_binding_failure(port) [ 617.187724] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.187724] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] raise exception.PortBindingFailed(port_id=port['id']) [ 617.187724] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] nova.exception.PortBindingFailed: Binding failed for port 6355869c-d5f0-4430-b8df-a229b415cb3c, please check neutron logs for more information. [ 617.187724] env[62109]: ERROR nova.compute.manager [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] [ 617.188122] env[62109]: DEBUG nova.compute.utils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Binding failed for port 6355869c-d5f0-4430-b8df-a229b415cb3c, please check neutron logs for more information. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 617.190492] env[62109]: DEBUG nova.virt.hardware [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 617.190774] env[62109]: DEBUG nova.virt.hardware [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 617.190874] env[62109]: DEBUG nova.virt.hardware [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 617.191150] env[62109]: DEBUG nova.virt.hardware [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 617.191335] env[62109]: DEBUG nova.virt.hardware [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 617.191521] env[62109]: DEBUG nova.virt.hardware [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 617.191730] env[62109]: DEBUG nova.virt.hardware [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 617.191888] env[62109]: DEBUG nova.virt.hardware [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
617.192126] env[62109]: DEBUG nova.virt.hardware [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 617.192364] env[62109]: DEBUG nova.virt.hardware [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 617.192561] env[62109]: DEBUG nova.virt.hardware [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 617.192907] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.214s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.195077] env[62109]: INFO nova.compute.claims [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 617.202439] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfb90bd-e84e-405a-8fde-38c1764705be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.209363] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d412d2-18be-4aba-902d-37a139343ab5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.214214] env[62109]: DEBUG nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Build of instance a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e was re-scheduled: Binding failed for port 6355869c-d5f0-4430-b8df-a229b415cb3c, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 617.214905] env[62109]: DEBUG nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 617.216029] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Acquiring lock "refresh_cache-a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.216436] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Acquired lock "refresh_cache-a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.216664] env[62109]: DEBUG nova.network.neutron [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 617.233253] env[62109]: ERROR nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 022476a0-892b-4e81-afca-c01cc29d6f07, please check neutron logs for more information. 
[ 617.233253] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Traceback (most recent call last): [ 617.233253] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 617.233253] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] yield resources [ 617.233253] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 617.233253] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] self.driver.spawn(context, instance, image_meta, [ 617.233253] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 617.233253] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 617.233253] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 617.233253] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] vm_ref = self.build_virtual_machine(instance, [ 617.233253] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 617.233885] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] vif_infos = vmwarevif.get_vif_info(self._session, [ 617.233885] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 617.233885] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] for vif in network_info: [ 617.233885] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 617.233885] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] return self._sync_wrapper(fn, *args, **kwargs) [ 617.233885] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 617.233885] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] self.wait() [ 617.233885] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 617.233885] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] self[:] = self._gt.wait() [ 617.233885] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 617.233885] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] return self._exit_event.wait() [ 617.233885] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 617.233885] env[62109]: ERROR 
nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] current.throw(*self._exc) [ 617.234307] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 617.234307] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] result = function(*args, **kwargs) [ 617.234307] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 617.234307] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] return func(*args, **kwargs) [ 617.234307] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 617.234307] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] raise e [ 617.234307] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 617.234307] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] nwinfo = self.network_api.allocate_for_instance( [ 617.234307] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 617.234307] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] created_port_ids = self._update_ports_for_instance( [ 617.234307] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 617.234307] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] with excutils.save_and_reraise_exception(): [ 617.234307] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.234705] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] self.force_reraise() [ 617.234705] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.234705] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] raise self.value [ 617.234705] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 617.234705] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] updated_port = self._update_port( [ 617.234705] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.234705] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] _ensure_no_port_binding_failure(port) [ 617.234705] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
617.234705] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] raise exception.PortBindingFailed(port_id=port['id']) [ 617.234705] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] nova.exception.PortBindingFailed: Binding failed for port 022476a0-892b-4e81-afca-c01cc29d6f07, please check neutron logs for more information. [ 617.234705] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] [ 617.234705] env[62109]: INFO nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Terminating instance [ 617.236142] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "refresh_cache-a6d094c3-8488-4437-8972-aa246809a5b1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.333519] env[62109]: DEBUG nova.network.neutron [req-a2458d5a-6ff2-440c-9045-af8e8fb73c1c req-fcc74d01-7664-4837-800f-ba4cc610f02c service nova] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 617.432289] env[62109]: DEBUG nova.network.neutron [req-a2458d5a-6ff2-440c-9045-af8e8fb73c1c req-fcc74d01-7664-4837-800f-ba4cc610f02c service nova] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.741561] env[62109]: DEBUG nova.network.neutron [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 617.831516] env[62109]: DEBUG nova.network.neutron [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.937919] env[62109]: DEBUG oslo_concurrency.lockutils [req-a2458d5a-6ff2-440c-9045-af8e8fb73c1c req-fcc74d01-7664-4837-800f-ba4cc610f02c service nova] Releasing lock "refresh_cache-a6d094c3-8488-4437-8972-aa246809a5b1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.938357] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquired lock "refresh_cache-a6d094c3-8488-4437-8972-aa246809a5b1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.938544] env[62109]: DEBUG nova.network.neutron [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 618.335647] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Releasing lock "refresh_cache-a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.335858] env[62109]: DEBUG nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 618.336042] env[62109]: DEBUG nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 618.336206] env[62109]: DEBUG nova.network.neutron [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 618.366055] env[62109]: DEBUG nova.network.neutron [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.469020] env[62109]: DEBUG nova.network.neutron [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.665163] env[62109]: DEBUG nova.network.neutron [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.743092] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b116761-d835-4cbe-9c4f-934c28b5ab32 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.749935] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973953c9-9faf-4f78-93c6-eca51b2264e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.783554] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5424e467-de19-4e47-965b-88c87160cd39 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.792181] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f60712-6da5-4f86-9c19-9c76f7a4f823 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.808889] env[62109]: DEBUG nova.compute.provider_tree [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.852695] env[62109]: DEBUG nova.compute.manager [req-ab947212-f940-4a9d-b160-0b67dc45f3c0 req-105f3c7b-d729-49a5-9344-642b8b6ff078 service nova] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Received event network-vif-deleted-022476a0-892b-4e81-afca-c01cc29d6f07 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 618.870528] env[62109]: DEBUG nova.network.neutron [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.171461] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Releasing lock "refresh_cache-a6d094c3-8488-4437-8972-aa246809a5b1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.171461] env[62109]: DEBUG nova.compute.manager [None 
req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 619.171461] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 619.171461] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4fe15bb8-01bf-48b1-bd94-ff7a856f6bff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.182782] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a410189-8950-48bb-9df1-450d04d85e07 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.213385] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a6d094c3-8488-4437-8972-aa246809a5b1 could not be found. [ 619.213627] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 619.213817] env[62109]: INFO nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 619.214089] env[62109]: DEBUG oslo.service.loopingcall [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 619.214323] env[62109]: DEBUG nova.compute.manager [-] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 619.214416] env[62109]: DEBUG nova.network.neutron [-] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 619.306059] env[62109]: DEBUG nova.network.neutron [-] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 619.312945] env[62109]: DEBUG nova.scheduler.client.report [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 619.372426] env[62109]: INFO nova.compute.manager [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] [instance: a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e] Took 1.04 seconds to deallocate network for instance. [ 619.809636] env[62109]: DEBUG nova.network.neutron [-] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.818504] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.819019] env[62109]: DEBUG nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 619.823300] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.798s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.324577] env[62109]: INFO nova.compute.manager [-] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Took 1.11 seconds to deallocate network for instance. 
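The "Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398" records above repeat the placement inventory this compute node keeps reporting. As a minimal sketch of how to read those numbers (assuming Placement's usual effective-capacity rule of (total - reserved) * allocation_ratio; this is an illustrative reading, not code from Nova or Placement, and the dict below is copied from the log entry above):

```python
# Decode the inventory dict reported for provider 574e9717-c25e-453d-8028-45d9e2f95398.
# Assumption: effective capacity = (total - reserved) * allocation_ratio, and
# max_unit bounds the largest single allocation per resource class.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'min_unit': 1, 'max_unit': 16,    'step_size': 1},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 171,   'step_size': 1},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: effective capacity = {capacity:g}, "
          f"largest single allocation = {inv['max_unit']}")
```

Under that reading, the node advertises 192 schedulable VCPUs (48 physical at a 4.0 overcommit), 196078 MB of RAM after the 512 MB reservation, and 400 GB of disk, which is why the scheduler keeps accepting these tiny m1.nano claims while the builds fail later at port binding rather than at resource claiming.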
[ 620.325515] env[62109]: DEBUG nova.compute.claims [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 620.325515] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.327051] env[62109]: DEBUG nova.compute.utils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 620.331161] env[62109]: DEBUG nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 620.331342] env[62109]: DEBUG nova.network.neutron [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 620.400615] env[62109]: DEBUG nova.policy [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e93cb95a977642848692814515edc959', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94f752f777564cc58e9058e4c214a6e0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 620.416051] env[62109]: INFO nova.scheduler.client.report [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Deleted allocations for instance a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e [ 620.788855] env[62109]: DEBUG nova.network.neutron [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Successfully created port: c0d41346-4c78-4c41-b1d3-ee4114fc60c7 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 620.833891] env[62109]: DEBUG nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 620.903016] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0ca676-0d5c-462d-aecc-96c9be63269c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.915285] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d82c76c1-4566-4db0-b1c5-ff08ac1680a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.949460] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75ecc396-4e4b-4ce7-9037-a3a48c28866b tempest-ServersWithSpecificFlavorTestJSON-252485133 tempest-ServersWithSpecificFlavorTestJSON-252485133-project-member] Lock "a13cb0f0-5e0d-46eb-ae42-d9f9bad3832e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.070s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.951504] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50743a4b-f045-4f08-a1ed-7981efa39bc4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.960960] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f298539f-22fa-4225-a147-d1fa4831dff3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.973464] env[62109]: DEBUG nova.compute.provider_tree [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.454621] env[62109]: DEBUG nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 621.479767] env[62109]: DEBUG nova.scheduler.client.report [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 621.746447] env[62109]: ERROR nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c0d41346-4c78-4c41-b1d3-ee4114fc60c7, please check neutron logs for more information. [ 621.746447] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 621.746447] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 621.746447] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 621.746447] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 621.746447] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 621.746447] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 621.746447] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 621.746447] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.746447] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 621.746447] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.746447] env[62109]: ERROR nova.compute.manager raise self.value [ 621.746447] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 621.746447] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 621.746447] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.746447] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 621.747082] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 621.747082] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 621.747082] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c0d41346-4c78-4c41-b1d3-ee4114fc60c7, please check neutron logs for more information. 
[ 621.747082] env[62109]: ERROR nova.compute.manager [ 621.747562] env[62109]: Traceback (most recent call last): [ 621.747639] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 621.747639] env[62109]: listener.cb(fileno) [ 621.747639] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 621.747639] env[62109]: result = function(*args, **kwargs) [ 621.747639] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 621.747639] env[62109]: return func(*args, **kwargs) [ 621.747639] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 621.747639] env[62109]: raise e [ 621.747639] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 621.747639] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 621.747639] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 621.747639] env[62109]: created_port_ids = self._update_ports_for_instance( [ 621.747639] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 621.747639] env[62109]: with excutils.save_and_reraise_exception(): [ 621.747639] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.747639] env[62109]: self.force_reraise() [ 621.747639] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.747639] env[62109]: raise self.value [ 621.747639] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 621.747639] env[62109]: updated_port = self._update_port( [ 621.747639] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.747639] env[62109]: _ensure_no_port_binding_failure(port) [ 621.747639] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 621.747639] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 621.747639] env[62109]: nova.exception.PortBindingFailed: Binding failed for port c0d41346-4c78-4c41-b1d3-ee4114fc60c7, please check neutron logs for more information. [ 621.747639] env[62109]: Removing descriptor: 18 [ 621.846125] env[62109]: DEBUG nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 621.880053] env[62109]: DEBUG nova.virt.hardware [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 621.880353] env[62109]: DEBUG nova.virt.hardware [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 621.880754] env[62109]: DEBUG nova.virt.hardware [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 621.880986] env[62109]: DEBUG nova.virt.hardware [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 621.881187] env[62109]: DEBUG nova.virt.hardware [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 621.881543] env[62109]: DEBUG nova.virt.hardware [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 621.881820] env[62109]: DEBUG nova.virt.hardware [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 621.882026] env[62109]: DEBUG nova.virt.hardware [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 621.882473] env[62109]: DEBUG nova.virt.hardware [None req-126ce572-4385-4579-b747-bfb695c9f616 
tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 621.882700] env[62109]: DEBUG nova.virt.hardware [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 621.882914] env[62109]: DEBUG nova.virt.hardware [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 621.884015] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b16341-a999-4951-83fb-feb5699836c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.895444] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10567219-9d82-40db-981c-e607aeea66ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.909433] env[62109]: ERROR nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c0d41346-4c78-4c41-b1d3-ee4114fc60c7, please check neutron logs for more information. 
[ 621.909433] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Traceback (most recent call last): [ 621.909433] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 621.909433] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] yield resources [ 621.909433] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 621.909433] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] self.driver.spawn(context, instance, image_meta, [ 621.909433] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 621.909433] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 621.909433] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 621.909433] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] vm_ref = self.build_virtual_machine(instance, [ 621.909433] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 621.909897] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] vif_infos = vmwarevif.get_vif_info(self._session, [ 621.909897] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 621.909897] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] for vif in network_info: [ 621.909897] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 621.909897] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] return self._sync_wrapper(fn, *args, **kwargs) [ 621.909897] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 621.909897] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] self.wait() [ 621.909897] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 621.909897] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] self[:] = self._gt.wait() [ 621.909897] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 621.909897] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] return self._exit_event.wait() [ 621.909897] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 621.909897] env[62109]: ERROR 
nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] current.throw(*self._exc) [ 621.910422] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 621.910422] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] result = function(*args, **kwargs) [ 621.910422] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 621.910422] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] return func(*args, **kwargs) [ 621.910422] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 621.910422] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] raise e [ 621.910422] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 621.910422] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] nwinfo = self.network_api.allocate_for_instance( [ 621.910422] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 621.910422] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] created_port_ids = self._update_ports_for_instance( [ 621.910422] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 621.910422] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] with excutils.save_and_reraise_exception(): [ 621.910422] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.910865] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] self.force_reraise() [ 621.910865] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.910865] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] raise self.value [ 621.910865] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 621.910865] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] updated_port = self._update_port( [ 621.910865] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.910865] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] _ensure_no_port_binding_failure(port) [ 621.910865] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
621.910865] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] raise exception.PortBindingFailed(port_id=port['id']) [ 621.910865] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] nova.exception.PortBindingFailed: Binding failed for port c0d41346-4c78-4c41-b1d3-ee4114fc60c7, please check neutron logs for more information. [ 621.910865] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] [ 621.910865] env[62109]: INFO nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Terminating instance [ 621.911802] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquiring lock "refresh_cache-693e6fa3-4d5f-47aa-8543-32f21001b78f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.911958] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquired lock "refresh_cache-693e6fa3-4d5f-47aa-8543-32f21001b78f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.912133] env[62109]: DEBUG nova.network.neutron [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 621.975684] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.986872] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.165s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.987488] env[62109]: ERROR nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 41fc812d-5c4e-409c-bfa2-2fa230b4beea, please check neutron logs for more information. 
[ 621.987488] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Traceback (most recent call last): [ 621.987488] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 621.987488] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] self.driver.spawn(context, instance, image_meta, [ 621.987488] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 621.987488] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 621.987488] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 621.987488] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] vm_ref = self.build_virtual_machine(instance, [ 621.987488] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 621.987488] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] vif_infos = vmwarevif.get_vif_info(self._session, [ 621.987488] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 621.987851] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] for vif in network_info: [ 621.987851] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 621.987851] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] return self._sync_wrapper(fn, *args, **kwargs) [ 621.987851] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 621.987851] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] self.wait() [ 621.987851] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 621.987851] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] self[:] = self._gt.wait() [ 621.987851] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 621.987851] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] return self._exit_event.wait() [ 621.987851] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 621.987851] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] result = hub.switch() [ 621.987851] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
621.987851] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] return self.greenlet.switch() [ 621.988266] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 621.988266] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] result = function(*args, **kwargs) [ 621.988266] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 621.988266] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] return func(*args, **kwargs) [ 621.988266] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 621.988266] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] raise e [ 621.988266] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 621.988266] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] nwinfo = self.network_api.allocate_for_instance( [ 621.988266] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 621.988266] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] created_port_ids = self._update_ports_for_instance( [ 621.988266] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 621.988266] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] with excutils.save_and_reraise_exception(): [ 621.988266] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.988950] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] self.force_reraise() [ 621.988950] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.988950] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] raise self.value [ 621.988950] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 621.988950] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] updated_port = self._update_port( [ 621.988950] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.988950] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] _ensure_no_port_binding_failure(port) [ 621.988950] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 621.988950] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] raise exception.PortBindingFailed(port_id=port['id']) [ 621.988950] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] nova.exception.PortBindingFailed: Binding failed for port 41fc812d-5c4e-409c-bfa2-2fa230b4beea, please check neutron logs for more information. [ 621.988950] env[62109]: ERROR nova.compute.manager [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] [ 621.989320] env[62109]: DEBUG nova.compute.utils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Binding failed for port 41fc812d-5c4e-409c-bfa2-2fa230b4beea, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 621.989320] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.794s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.994323] env[62109]: DEBUG nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Build of instance 356e57cb-9e43-47e1-a02b-b81ff737883e was re-scheduled: Binding failed for port 41fc812d-5c4e-409c-bfa2-2fa230b4beea, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 621.994808] env[62109]: DEBUG nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 621.995190] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Acquiring lock "refresh_cache-356e57cb-9e43-47e1-a02b-b81ff737883e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.995190] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Acquired lock "refresh_cache-356e57cb-9e43-47e1-a02b-b81ff737883e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.995387] env[62109]: DEBUG nova.network.neutron [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 622.069768] env[62109]: DEBUG nova.compute.manager [req-be18d01f-2dcb-4127-87f9-a0f60de0c00b req-e70b22f5-b855-4deb-b886-e57e57787eae service nova] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Received event network-changed-c0d41346-4c78-4c41-b1d3-ee4114fc60c7 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 622.069978] env[62109]: DEBUG nova.compute.manager [req-be18d01f-2dcb-4127-87f9-a0f60de0c00b req-e70b22f5-b855-4deb-b886-e57e57787eae service nova] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Refreshing instance network info cache due to event network-changed-c0d41346-4c78-4c41-b1d3-ee4114fc60c7. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 622.070175] env[62109]: DEBUG oslo_concurrency.lockutils [req-be18d01f-2dcb-4127-87f9-a0f60de0c00b req-e70b22f5-b855-4deb-b886-e57e57787eae service nova] Acquiring lock "refresh_cache-693e6fa3-4d5f-47aa-8543-32f21001b78f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.433995] env[62109]: DEBUG nova.network.neutron [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 622.512390] env[62109]: DEBUG nova.network.neutron [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.531759] env[62109]: DEBUG nova.network.neutron [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 622.709295] env[62109]: DEBUG nova.network.neutron [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.016588] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Releasing lock "refresh_cache-693e6fa3-4d5f-47aa-8543-32f21001b78f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.017794] env[62109]: DEBUG nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 623.018176] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 623.018718] env[62109]: DEBUG oslo_concurrency.lockutils [req-be18d01f-2dcb-4127-87f9-a0f60de0c00b req-e70b22f5-b855-4deb-b886-e57e57787eae service nova] Acquired lock "refresh_cache-693e6fa3-4d5f-47aa-8543-32f21001b78f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.019099] env[62109]: DEBUG nova.network.neutron [req-be18d01f-2dcb-4127-87f9-a0f60de0c00b req-e70b22f5-b855-4deb-b886-e57e57787eae service nova] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Refreshing network info cache for port c0d41346-4c78-4c41-b1d3-ee4114fc60c7 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 623.024024] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2ef559e1-f95e-434e-a5bc-93b929974699 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.042264] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846f6bba-f7df-4753-9f9f-0511a84dea9c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.057969] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e80f6a1-0155-42a3-be1d-d03c5120967f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.068042] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2f4128-3f90-443c-98a2-d7fe86d65922 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.073191] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 693e6fa3-4d5f-47aa-8543-32f21001b78f could not be found. [ 623.073415] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 623.074812] env[62109]: INFO nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Took 0.06 seconds to destroy the instance on the hypervisor. [ 623.074812] env[62109]: DEBUG oslo.service.loopingcall [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 623.074812] env[62109]: DEBUG nova.compute.manager [-] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 623.074812] env[62109]: DEBUG nova.network.neutron [-] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 623.102460] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ade7b11-0002-49bf-84a2-78aff819e710 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.110763] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c956d944-73b5-4d0a-bdf8-42bfd8a1ae62 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.123551] env[62109]: DEBUG nova.compute.provider_tree [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.214853] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Releasing lock "refresh_cache-356e57cb-9e43-47e1-a02b-b81ff737883e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.215113] env[62109]: DEBUG nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 623.215298] env[62109]: DEBUG nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 623.215478] env[62109]: DEBUG nova.network.neutron [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 623.231937] env[62109]: DEBUG nova.network.neutron [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.265285] env[62109]: DEBUG nova.network.neutron [-] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.541827] env[62109]: DEBUG nova.network.neutron [req-be18d01f-2dcb-4127-87f9-a0f60de0c00b req-e70b22f5-b855-4deb-b886-e57e57787eae service nova] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.626392] env[62109]: DEBUG nova.scheduler.client.report [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 623.664574] env[62109]: DEBUG nova.network.neutron [req-be18d01f-2dcb-4127-87f9-a0f60de0c00b req-e70b22f5-b855-4deb-b886-e57e57787eae service nova] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.737149] env[62109]: DEBUG nova.network.neutron [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.769399] env[62109]: DEBUG nova.network.neutron [-] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.131837] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.142s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.132491] env[62109]: ERROR nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8c87bcce-c05b-48d6-8366-75225b049447, please check neutron logs for more information. 
[ 624.132491] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Traceback (most recent call last): [ 624.132491] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 624.132491] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] self.driver.spawn(context, instance, image_meta, [ 624.132491] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 624.132491] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 624.132491] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 624.132491] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] vm_ref = self.build_virtual_machine(instance, [ 624.132491] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 624.132491] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] vif_infos = vmwarevif.get_vif_info(self._session, [ 624.132491] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 624.132735] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] for vif in network_info: [ 624.132735] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 624.132735] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] return self._sync_wrapper(fn, *args, **kwargs) [ 624.132735] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 624.132735] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] self.wait() [ 624.132735] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 624.132735] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] self[:] = self._gt.wait() [ 624.132735] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 624.132735] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] return self._exit_event.wait() [ 624.132735] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 624.132735] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] result = hub.switch() [ 624.132735] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
624.132735] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] return self.greenlet.switch() [ 624.132977] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 624.132977] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] result = function(*args, **kwargs) [ 624.132977] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 624.132977] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] return func(*args, **kwargs) [ 624.132977] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 624.132977] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] raise e [ 624.132977] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 624.132977] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] nwinfo = self.network_api.allocate_for_instance( [ 624.132977] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 624.132977] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] created_port_ids = self._update_ports_for_instance( [ 624.132977] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 624.132977] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] with excutils.save_and_reraise_exception(): [ 624.132977] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 624.133259] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] self.force_reraise() [ 624.133259] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 624.133259] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] raise self.value [ 624.133259] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 624.133259] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] updated_port = self._update_port( [ 624.133259] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 624.133259] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] _ensure_no_port_binding_failure(port) [ 624.133259] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 624.133259] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] raise exception.PortBindingFailed(port_id=port['id']) [ 624.133259] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] nova.exception.PortBindingFailed: Binding failed for port 8c87bcce-c05b-48d6-8366-75225b049447, please check neutron logs for more information. [ 624.133259] env[62109]: ERROR nova.compute.manager [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] [ 624.133511] env[62109]: DEBUG nova.compute.utils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Binding failed for port 8c87bcce-c05b-48d6-8366-75225b049447, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 624.134402] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.103s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.136017] env[62109]: INFO nova.compute.claims [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 624.142019] env[62109]: DEBUG nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Build of instance 49137502-b0a0-49f8-affa-82f19caf34b0 was re-scheduled: Binding failed for port 8c87bcce-c05b-48d6-8366-75225b049447, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 624.142019] env[62109]: DEBUG nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 624.142019] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "refresh_cache-49137502-b0a0-49f8-affa-82f19caf34b0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.142019] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquired lock "refresh_cache-49137502-b0a0-49f8-affa-82f19caf34b0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.142223] env[62109]: DEBUG nova.network.neutron [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 624.167761] env[62109]: DEBUG oslo_concurrency.lockutils [req-be18d01f-2dcb-4127-87f9-a0f60de0c00b req-e70b22f5-b855-4deb-b886-e57e57787eae service nova] Releasing lock "refresh_cache-693e6fa3-4d5f-47aa-8543-32f21001b78f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.243864] env[62109]: INFO nova.compute.manager [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] [instance: 356e57cb-9e43-47e1-a02b-b81ff737883e] Took 1.03 seconds to deallocate network for instance. [ 624.273385] env[62109]: INFO nova.compute.manager [-] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Took 1.20 seconds to deallocate network for instance. 
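Annotation (not a log record): the failed builds above all follow the same path visible in the traceback — allocate_for_instance() updates the port in Neutron, the returned port comes back with a failed binding, and nova/network/neutron.py:294 (_ensure_no_port_binding_failure) raises PortBindingFailed; the compute manager then aborts the resource claim under the "compute_resources" lock, deallocates networking, and re-schedules the instance. The sketch below is a minimal reconstruction of that check for illustration only; the exact field it tests ('binding:vif_type' == 'binding_failed') is an assumption inferred from the traceback, not something the log itself shows.

# Minimal sketch of the check implied by the traceback above. Assumption:
# Neutron signals a failed binding by setting 'binding:vif_type' to
# 'binding_failed' on the port body; this is not confirmed by the log.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    """Raise if Neutron reports that it could not bind the port to a host."""
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example: a port that came back from Neutron with a failed binding produces
# the same message that appears in the error records above.
failed_port = {'id': '8c87bcce-c05b-48d6-8366-75225b049447',
               'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(failed_port)
except PortBindingFailed as exc:
    print(exc)

Because the allocation runs asynchronously, the same exception can surface a second time later in the build, as the annotation further below explains for port 8051c2fe-1a10-4aca-8678-f30fd380fe0e.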
[ 624.277225] env[62109]: DEBUG nova.compute.claims [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 624.277225] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.651447] env[62109]: DEBUG nova.compute.manager [req-4c849dd9-5314-4f64-8e8d-7a51c0970e9e req-29fd4e42-0590-465c-937c-b7dae5bf7366 service nova] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Received event network-vif-deleted-c0d41346-4c78-4c41-b1d3-ee4114fc60c7 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 624.668183] env[62109]: DEBUG nova.network.neutron [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 624.839545] env[62109]: DEBUG nova.network.neutron [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.298216] env[62109]: INFO nova.scheduler.client.report [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Deleted allocations for instance 356e57cb-9e43-47e1-a02b-b81ff737883e [ 625.345609] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Releasing lock "refresh_cache-49137502-b0a0-49f8-affa-82f19caf34b0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.345864] env[62109]: DEBUG nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 625.346058] env[62109]: DEBUG nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 625.346236] env[62109]: DEBUG nova.network.neutron [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 625.378427] env[62109]: DEBUG nova.network.neutron [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 625.646047] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e420a46-bd65-4e59-96a9-9cfc57635b2a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.652989] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc73695-5ed8-4735-b472-82a89d9d2eff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.686945] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced52daa-67cb-4aff-998b-b5b1703d956b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.696709] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011a2af3-5a8e-412f-bed0-1a6b8ef91585 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.710763] env[62109]: DEBUG nova.compute.provider_tree [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.810348] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b1d598d-4d77-4c14-b63e-4b2a5a1ed073 tempest-AttachInterfacesV270Test-633058496 tempest-AttachInterfacesV270Test-633058496-project-member] Lock "356e57cb-9e43-47e1-a02b-b81ff737883e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.764s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.882537] env[62109]: DEBUG nova.network.neutron [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 626.214318] env[62109]: DEBUG nova.scheduler.client.report [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 626.313448] env[62109]: DEBUG nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 626.385985] env[62109]: INFO nova.compute.manager [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 49137502-b0a0-49f8-affa-82f19caf34b0] Took 1.04 seconds to deallocate network for instance. [ 626.720225] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.720750] env[62109]: DEBUG nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 626.724132] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.700s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.844141] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.230173] env[62109]: DEBUG nova.compute.utils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 627.235041] env[62109]: DEBUG nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 627.235041] env[62109]: DEBUG nova.network.neutron [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 627.333121] env[62109]: DEBUG nova.policy [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b8ac755b9de4977a964f3b6cfe09416', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '885390a7b4a94a9381d906709ff2d585', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 627.417320] env[62109]: INFO nova.scheduler.client.report [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Deleted allocations for instance 49137502-b0a0-49f8-affa-82f19caf34b0 [ 627.739377] env[62109]: DEBUG nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 627.743676] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59dd01a2-d86e-4b8f-8956-bf66be1fe9cc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.753254] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99325f42-2abd-4fd9-98ab-1130eb1370d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.788236] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4040463-2569-4fed-8689-3c900f75ddde {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.801173] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd2280b-4c8f-45d3-aeb9-84485eadc81c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.817445] env[62109]: DEBUG nova.compute.provider_tree [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.926316] env[62109]: DEBUG oslo_concurrency.lockutils [None req-213d4d93-e76d-41e2-9579-f10d2bef2297 tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "49137502-b0a0-49f8-affa-82f19caf34b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.588s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.938884] env[62109]: DEBUG nova.network.neutron [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Successfully created port: 8051c2fe-1a10-4aca-8678-f30fd380fe0e {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 628.320843] env[62109]: DEBUG nova.scheduler.client.report [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 628.430204] env[62109]: DEBUG nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 628.758140] env[62109]: DEBUG nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 628.792795] env[62109]: DEBUG nova.virt.hardware [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 628.793089] env[62109]: DEBUG nova.virt.hardware [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 628.793250] env[62109]: DEBUG nova.virt.hardware [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 628.793431] env[62109]: DEBUG nova.virt.hardware [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 628.793616] env[62109]: DEBUG nova.virt.hardware [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 628.793754] env[62109]: DEBUG nova.virt.hardware [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 628.793962] env[62109]: DEBUG nova.virt.hardware [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 628.794303] env[62109]: DEBUG nova.virt.hardware [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 628.794554] env[62109]: DEBUG nova.virt.hardware [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 628.794770] env[62109]: DEBUG nova.virt.hardware [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 628.794948] env[62109]: DEBUG nova.virt.hardware [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 628.795936] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5670dc6-18d9-4fff-96e1-91b7ba0e8de0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.804394] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6c437f-1a46-4892-a58b-95778401c701 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.828650] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.104s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.829544] env[62109]: ERROR nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 59c8d794-2ad6-4cce-900a-4e99a1b46eb6, please check neutron logs for more information. 
[ 628.829544] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Traceback (most recent call last): [ 628.829544] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 628.829544] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] self.driver.spawn(context, instance, image_meta, [ 628.829544] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 628.829544] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 628.829544] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 628.829544] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] vm_ref = self.build_virtual_machine(instance, [ 628.829544] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 628.829544] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] vif_infos = vmwarevif.get_vif_info(self._session, [ 628.829544] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 628.830275] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] for vif in network_info: [ 628.830275] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 628.830275] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] return self._sync_wrapper(fn, *args, **kwargs) [ 628.830275] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 628.830275] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] self.wait() [ 628.830275] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 628.830275] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] self[:] = self._gt.wait() [ 628.830275] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 628.830275] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] return self._exit_event.wait() [ 628.830275] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 628.830275] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] current.throw(*self._exc) [ 628.830275] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
628.830275] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] result = function(*args, **kwargs) [ 628.830571] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 628.830571] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] return func(*args, **kwargs) [ 628.830571] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.830571] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] raise e [ 628.830571] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.830571] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] nwinfo = self.network_api.allocate_for_instance( [ 628.830571] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.830571] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] created_port_ids = self._update_ports_for_instance( [ 628.830571] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.830571] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] with excutils.save_and_reraise_exception(): [ 628.830571] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.830571] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] self.force_reraise() [ 628.830571] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.830874] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] raise self.value [ 628.830874] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.830874] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] updated_port = self._update_port( [ 628.830874] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.830874] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] _ensure_no_port_binding_failure(port) [ 628.830874] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.830874] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] raise exception.PortBindingFailed(port_id=port['id']) [ 628.830874] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] nova.exception.PortBindingFailed: Binding failed for 
port 59c8d794-2ad6-4cce-900a-4e99a1b46eb6, please check neutron logs for more information. [ 628.830874] env[62109]: ERROR nova.compute.manager [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] [ 628.832958] env[62109]: DEBUG nova.compute.utils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Binding failed for port 59c8d794-2ad6-4cce-900a-4e99a1b46eb6, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 628.833697] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.073s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.835298] env[62109]: INFO nova.compute.claims [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.838599] env[62109]: DEBUG nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Build of instance 28e71e8c-2c47-4ea8-bd90-33eb064073e5 was re-scheduled: Binding failed for port 59c8d794-2ad6-4cce-900a-4e99a1b46eb6, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 628.839055] env[62109]: DEBUG nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 628.839313] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "refresh_cache-28e71e8c-2c47-4ea8-bd90-33eb064073e5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.839529] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquired lock "refresh_cache-28e71e8c-2c47-4ea8-bd90-33eb064073e5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.839713] env[62109]: DEBUG nova.network.neutron [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 628.958144] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.122119] env[62109]: DEBUG nova.compute.manager [req-0630307e-6ad7-4b6a-95a8-3dc369215eb1 req-4c50e070-ad37-4c80-ac40-1467776c2b6c service nova] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Received event network-changed-8051c2fe-1a10-4aca-8678-f30fd380fe0e {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 629.122335] env[62109]: DEBUG nova.compute.manager [req-0630307e-6ad7-4b6a-95a8-3dc369215eb1 req-4c50e070-ad37-4c80-ac40-1467776c2b6c service nova] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Refreshing instance network info cache due to event network-changed-8051c2fe-1a10-4aca-8678-f30fd380fe0e. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 629.122548] env[62109]: DEBUG oslo_concurrency.lockutils [req-0630307e-6ad7-4b6a-95a8-3dc369215eb1 req-4c50e070-ad37-4c80-ac40-1467776c2b6c service nova] Acquiring lock "refresh_cache-b60d334c-0834-4267-bb31-1f3c679a2e1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.122718] env[62109]: DEBUG oslo_concurrency.lockutils [req-0630307e-6ad7-4b6a-95a8-3dc369215eb1 req-4c50e070-ad37-4c80-ac40-1467776c2b6c service nova] Acquired lock "refresh_cache-b60d334c-0834-4267-bb31-1f3c679a2e1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.122839] env[62109]: DEBUG nova.network.neutron [req-0630307e-6ad7-4b6a-95a8-3dc369215eb1 req-4c50e070-ad37-4c80-ac40-1467776c2b6c service nova] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Refreshing network info cache for port 8051c2fe-1a10-4aca-8678-f30fd380fe0e {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 629.365112] env[62109]: DEBUG nova.network.neutron [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.429476] env[62109]: DEBUG nova.network.neutron [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.559102] env[62109]: ERROR nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8051c2fe-1a10-4aca-8678-f30fd380fe0e, please check neutron logs for more information. 
[ 629.559102] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 629.559102] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 629.559102] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 629.559102] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 629.559102] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 629.559102] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 629.559102] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 629.559102] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.559102] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 629.559102] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.559102] env[62109]: ERROR nova.compute.manager raise self.value [ 629.559102] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 629.559102] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 629.559102] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.559102] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 629.559629] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.559629] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 629.559629] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8051c2fe-1a10-4aca-8678-f30fd380fe0e, please check neutron logs for more information. 
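Annotation (not a log record): the traceback above, and the records that follow for instance b60d334c-0834-4267-bb31-1f3c679a2e1d, show why one port-binding failure is reported more than once. _allocate_network_async() runs in an eventlet greenthread while the build continues; the greenthread raises PortBindingFailed, and the stored exception is re-raised again when the VMware driver iterates network_info and the async wrapper reaches self._gt.wait() in nova/network/model.py. The sketch below illustrates that propagation, assuming eventlet is installed; allocate_network() is an illustrative stand-in, not Nova code.

# Minimal sketch: an exception raised inside a spawned greenthread is stored
# on its exit event and re-raised in whichever caller later waits on the
# GreenThread, which is how PortBindingFailed from the async network
# allocation resurfaces in the driver's spawn path.
import eventlet


class PortBindingFailed(Exception):
    pass


def allocate_network(port_id):
    # Stand-in for the asynchronous allocation: the real path calls
    # network_api.allocate_for_instance() and re-raises any failure.
    raise PortBindingFailed("Binding failed for port %s, please check "
                            "neutron logs for more information." % port_id)


gt = eventlet.spawn(allocate_network, '8051c2fe-1a10-4aca-8678-f30fd380fe0e')

try:
    # Analogous to network_info's _sync_wrapper calling self._gt.wait()
    # in the model.py frames shown above.
    network_info = gt.wait()
except PortBindingFailed as exc:
    # Second appearance of the same failure, matching the
    # "Instance failed to spawn" record in the log.
    print('build failed: %s' % exc)

Because the wait happens lazily, the failure only surfaces after the resource claim succeeded and block device mappings were already being built, which is why the subsequent records terminate the instance and re-acquire the refresh_cache lock to clean up.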
[ 629.559629] env[62109]: ERROR nova.compute.manager [ 629.559629] env[62109]: Traceback (most recent call last): [ 629.559629] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 629.559629] env[62109]: listener.cb(fileno) [ 629.559629] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 629.559629] env[62109]: result = function(*args, **kwargs) [ 629.559629] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 629.559629] env[62109]: return func(*args, **kwargs) [ 629.559629] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 629.559629] env[62109]: raise e [ 629.559629] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 629.559629] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 629.559629] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 629.559629] env[62109]: created_port_ids = self._update_ports_for_instance( [ 629.559629] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 629.559629] env[62109]: with excutils.save_and_reraise_exception(): [ 629.559629] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.559629] env[62109]: self.force_reraise() [ 629.559629] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.559629] env[62109]: raise self.value [ 629.559629] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 629.559629] env[62109]: updated_port = self._update_port( [ 629.559629] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.559629] env[62109]: _ensure_no_port_binding_failure(port) [ 629.559629] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.559629] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 629.560277] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 8051c2fe-1a10-4aca-8678-f30fd380fe0e, please check neutron logs for more information. [ 629.560277] env[62109]: Removing descriptor: 15 [ 629.560277] env[62109]: ERROR nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8051c2fe-1a10-4aca-8678-f30fd380fe0e, please check neutron logs for more information. 
[ 629.560277] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Traceback (most recent call last): [ 629.560277] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 629.560277] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] yield resources [ 629.560277] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 629.560277] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] self.driver.spawn(context, instance, image_meta, [ 629.560277] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 629.560277] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 629.560277] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 629.560277] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] vm_ref = self.build_virtual_machine(instance, [ 629.560536] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 629.560536] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] vif_infos = vmwarevif.get_vif_info(self._session, [ 629.560536] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 629.560536] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] for vif in network_info: [ 629.560536] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 629.560536] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] return self._sync_wrapper(fn, *args, **kwargs) [ 629.560536] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 629.560536] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] self.wait() [ 629.560536] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 629.560536] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] self[:] = self._gt.wait() [ 629.560536] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 629.560536] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] return self._exit_event.wait() [ 629.560536] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 629.560863] env[62109]: ERROR 
nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] result = hub.switch() [ 629.560863] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 629.560863] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] return self.greenlet.switch() [ 629.560863] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 629.560863] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] result = function(*args, **kwargs) [ 629.560863] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 629.560863] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] return func(*args, **kwargs) [ 629.560863] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 629.560863] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] raise e [ 629.560863] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 629.560863] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] nwinfo = self.network_api.allocate_for_instance( [ 629.560863] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 629.560863] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] created_port_ids = self._update_ports_for_instance( [ 629.561175] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 629.561175] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] with excutils.save_and_reraise_exception(): [ 629.561175] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.561175] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] self.force_reraise() [ 629.561175] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.561175] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] raise self.value [ 629.561175] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 629.561175] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] updated_port = self._update_port( [ 629.561175] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.561175] 
env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] _ensure_no_port_binding_failure(port) [ 629.561175] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.561175] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] raise exception.PortBindingFailed(port_id=port['id']) [ 629.561461] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] nova.exception.PortBindingFailed: Binding failed for port 8051c2fe-1a10-4aca-8678-f30fd380fe0e, please check neutron logs for more information. [ 629.561461] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] [ 629.561461] env[62109]: INFO nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Terminating instance [ 629.564407] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "refresh_cache-b60d334c-0834-4267-bb31-1f3c679a2e1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.649524] env[62109]: DEBUG nova.network.neutron [req-0630307e-6ad7-4b6a-95a8-3dc369215eb1 req-4c50e070-ad37-4c80-ac40-1467776c2b6c service nova] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.767534] env[62109]: DEBUG nova.network.neutron [req-0630307e-6ad7-4b6a-95a8-3dc369215eb1 req-4c50e070-ad37-4c80-ac40-1467776c2b6c service nova] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.934686] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Releasing lock "refresh_cache-28e71e8c-2c47-4ea8-bd90-33eb064073e5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.934686] env[62109]: DEBUG nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 629.934686] env[62109]: DEBUG nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 629.934686] env[62109]: DEBUG nova.network.neutron [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 629.952777] env[62109]: DEBUG nova.network.neutron [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 630.270262] env[62109]: DEBUG oslo_concurrency.lockutils [req-0630307e-6ad7-4b6a-95a8-3dc369215eb1 req-4c50e070-ad37-4c80-ac40-1467776c2b6c service nova] Releasing lock "refresh_cache-b60d334c-0834-4267-bb31-1f3c679a2e1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.270665] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquired lock "refresh_cache-b60d334c-0834-4267-bb31-1f3c679a2e1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.270852] env[62109]: DEBUG nova.network.neutron [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 630.323484] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae42167f-6701-48dc-a001-dfd347860823 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.331564] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14d6f8e-09fe-414c-8d93-f1cb60a62270 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.368132] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c767df-f1cf-4ba5-b5ce-9701dbd68b37 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.375683] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb8c641-f06a-4978-93d4-9de634c6205b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.388919] env[62109]: DEBUG nova.compute.provider_tree [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Inventory 
has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.455137] env[62109]: DEBUG nova.network.neutron [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.775753] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Acquiring lock "dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.777155] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lock "dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.802972] env[62109]: DEBUG nova.network.neutron [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 630.894432] env[62109]: DEBUG nova.scheduler.client.report [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 630.958039] env[62109]: INFO nova.compute.manager [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 28e71e8c-2c47-4ea8-bd90-33eb064073e5] Took 1.03 seconds to deallocate network for instance. 
[ 631.074313] env[62109]: DEBUG nova.network.neutron [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.316928] env[62109]: DEBUG nova.compute.manager [req-14e03a20-7dbd-47a7-9ccc-73236f5f58ac req-6bc7e4d9-183d-45c5-b0bc-e5bc076dc6bb service nova] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Received event network-vif-deleted-8051c2fe-1a10-4aca-8678-f30fd380fe0e {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 631.402713] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.568s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.402713] env[62109]: DEBUG nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 631.406209] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.446s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.576981] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Releasing lock "refresh_cache-b60d334c-0834-4267-bb31-1f3c679a2e1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.577434] env[62109]: DEBUG nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 631.577734] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 631.578473] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-498b84fe-231e-4601-953d-1313bc077efa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.588286] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7c4b51-6a2c-46cc-a422-bfd06e8042f1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.610555] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b60d334c-0834-4267-bb31-1f3c679a2e1d could not be found. [ 631.610863] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 631.611250] env[62109]: INFO nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Took 0.03 seconds to destroy the instance on the hypervisor. [ 631.612251] env[62109]: DEBUG oslo.service.loopingcall [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 631.612251] env[62109]: DEBUG nova.compute.manager [-] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 631.612251] env[62109]: DEBUG nova.network.neutron [-] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 631.689657] env[62109]: DEBUG nova.network.neutron [-] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 631.916760] env[62109]: DEBUG nova.compute.utils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 631.923038] env[62109]: DEBUG nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 631.925057] env[62109]: DEBUG nova.network.neutron [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 632.000378] env[62109]: DEBUG nova.policy [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e93cb95a977642848692814515edc959', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94f752f777564cc58e9058e4c214a6e0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 632.006114] env[62109]: INFO nova.scheduler.client.report [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Deleted allocations for instance 28e71e8c-2c47-4ea8-bd90-33eb064073e5 [ 632.193570] env[62109]: DEBUG nova.network.neutron [-] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.386018] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137ad9d9-1110-49b8-b400-daf025207bdf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.394999] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e5e022-baf5-4105-91dc-8ba81b8c3913 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.430076] env[62109]: DEBUG nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 632.433869] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b98629e-e6aa-4cf3-b579-c3b694564e18 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.446147] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf48b516-1811-4dd2-948c-eea15d7e146c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.461316] env[62109]: DEBUG nova.compute.provider_tree [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.519180] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f2f003a3-554e-4174-9ba1-e4a5e9747c7b tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "28e71e8c-2c47-4ea8-bd90-33eb064073e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.181s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.541693] env[62109]: DEBUG nova.network.neutron [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Successfully created port: f6bb382f-2bbc-4c88-933d-cdb3f0795d13 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 632.697020] env[62109]: INFO nova.compute.manager [-] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Took 1.08 seconds to deallocate network for instance. 
[ 632.699407] env[62109]: DEBUG nova.compute.claims [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 632.699658] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.964326] env[62109]: DEBUG nova.scheduler.client.report [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 633.025223] env[62109]: DEBUG nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 633.451778] env[62109]: DEBUG nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 633.474274] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.066s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.474274] env[62109]: ERROR nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a6986e60-16bd-476e-b76c-ace1af4b54c8, please check neutron logs for more information. 
[ 633.474274] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Traceback (most recent call last): [ 633.474274] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 633.474274] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] self.driver.spawn(context, instance, image_meta, [ 633.474274] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 633.474274] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 633.474274] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 633.474274] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] vm_ref = self.build_virtual_machine(instance, [ 633.474494] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 633.474494] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] vif_infos = vmwarevif.get_vif_info(self._session, [ 633.474494] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 633.474494] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] for vif in network_info: [ 633.474494] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 633.474494] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] return self._sync_wrapper(fn, *args, **kwargs) [ 633.474494] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 633.474494] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] self.wait() [ 633.474494] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 633.474494] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] self[:] = self._gt.wait() [ 633.474494] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 633.474494] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] return self._exit_event.wait() [ 633.474494] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 633.474800] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] result = hub.switch() [ 633.474800] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
633.474800] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] return self.greenlet.switch() [ 633.474800] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.474800] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] result = function(*args, **kwargs) [ 633.474800] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 633.474800] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] return func(*args, **kwargs) [ 633.474800] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 633.474800] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] raise e [ 633.474800] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.474800] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] nwinfo = self.network_api.allocate_for_instance( [ 633.474800] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 633.474800] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] created_port_ids = self._update_ports_for_instance( [ 633.475150] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 633.475150] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] with excutils.save_and_reraise_exception(): [ 633.475150] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.475150] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] self.force_reraise() [ 633.475150] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.475150] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] raise self.value [ 633.475150] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 633.475150] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] updated_port = self._update_port( [ 633.475150] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.475150] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] _ensure_no_port_binding_failure(port) [ 633.475150] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 633.475150] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] raise exception.PortBindingFailed(port_id=port['id']) [ 633.475407] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] nova.exception.PortBindingFailed: Binding failed for port a6986e60-16bd-476e-b76c-ace1af4b54c8, please check neutron logs for more information. [ 633.475407] env[62109]: ERROR nova.compute.manager [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] [ 633.475407] env[62109]: DEBUG nova.compute.utils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Binding failed for port a6986e60-16bd-476e-b76c-ace1af4b54c8, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 633.477791] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.136s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.481585] env[62109]: DEBUG nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Build of instance 58f76ca2-8f1b-4d9f-887b-1527ba70e91c was re-scheduled: Binding failed for port a6986e60-16bd-476e-b76c-ace1af4b54c8, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 633.482064] env[62109]: DEBUG nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 633.482496] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Acquiring lock "refresh_cache-58f76ca2-8f1b-4d9f-887b-1527ba70e91c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.482496] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Acquired lock "refresh_cache-58f76ca2-8f1b-4d9f-887b-1527ba70e91c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.482690] env[62109]: DEBUG nova.network.neutron [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 633.490509] env[62109]: DEBUG nova.virt.hardware [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 633.490858] env[62109]: DEBUG nova.virt.hardware [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 633.490956] env[62109]: DEBUG nova.virt.hardware [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 633.491051] env[62109]: DEBUG nova.virt.hardware [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Flavor pref 0:0:0 {{(pid=62109) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 633.491192] env[62109]: DEBUG nova.virt.hardware [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 633.491334] env[62109]: DEBUG nova.virt.hardware [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 633.491534] env[62109]: DEBUG nova.virt.hardware [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 633.491691] env[62109]: DEBUG nova.virt.hardware [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 633.491855] env[62109]: DEBUG nova.virt.hardware [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 633.495514] env[62109]: DEBUG nova.virt.hardware [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 633.495514] env[62109]: DEBUG nova.virt.hardware [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 633.496520] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f20508-df81-432e-b521-216661e31fff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.506636] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ad649c-6ee7-44f6-b9ae-efdd5a5e0232 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.556316] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.815087] env[62109]: ERROR nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a 
tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f6bb382f-2bbc-4c88-933d-cdb3f0795d13, please check neutron logs for more information. [ 633.815087] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 633.815087] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.815087] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 633.815087] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 633.815087] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 633.815087] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 633.815087] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 633.815087] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.815087] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 633.815087] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.815087] env[62109]: ERROR nova.compute.manager raise self.value [ 633.815087] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 633.815087] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 633.815087] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.815087] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 633.815529] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.815529] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 633.815529] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f6bb382f-2bbc-4c88-933d-cdb3f0795d13, please check neutron logs for more information. 
[ 633.815529] env[62109]: ERROR nova.compute.manager [ 633.815529] env[62109]: Traceback (most recent call last): [ 633.815529] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 633.815529] env[62109]: listener.cb(fileno) [ 633.815529] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.815529] env[62109]: result = function(*args, **kwargs) [ 633.815529] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 633.815529] env[62109]: return func(*args, **kwargs) [ 633.815529] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 633.815529] env[62109]: raise e [ 633.815529] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.815529] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 633.815529] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 633.815529] env[62109]: created_port_ids = self._update_ports_for_instance( [ 633.815529] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 633.815529] env[62109]: with excutils.save_and_reraise_exception(): [ 633.815529] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.815529] env[62109]: self.force_reraise() [ 633.815529] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.815529] env[62109]: raise self.value [ 633.815529] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 633.815529] env[62109]: updated_port = self._update_port( [ 633.815529] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.815529] env[62109]: _ensure_no_port_binding_failure(port) [ 633.815529] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.815529] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 633.816225] env[62109]: nova.exception.PortBindingFailed: Binding failed for port f6bb382f-2bbc-4c88-933d-cdb3f0795d13, please check neutron logs for more information. [ 633.816225] env[62109]: Removing descriptor: 15 [ 633.817708] env[62109]: ERROR nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f6bb382f-2bbc-4c88-933d-cdb3f0795d13, please check neutron logs for more information. 
[ 633.817708] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Traceback (most recent call last): [ 633.817708] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 633.817708] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] yield resources [ 633.817708] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 633.817708] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] self.driver.spawn(context, instance, image_meta, [ 633.817708] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 633.817708] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 633.817708] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 633.817708] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] vm_ref = self.build_virtual_machine(instance, [ 633.817708] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 633.818035] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] vif_infos = vmwarevif.get_vif_info(self._session, [ 633.818035] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 633.818035] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] for vif in network_info: [ 633.818035] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 633.818035] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] return self._sync_wrapper(fn, *args, **kwargs) [ 633.818035] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 633.818035] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] self.wait() [ 633.818035] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 633.818035] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] self[:] = self._gt.wait() [ 633.818035] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 633.818035] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] return self._exit_event.wait() [ 633.818035] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 633.818035] env[62109]: ERROR 
nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] result = hub.switch() [ 633.818376] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 633.818376] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] return self.greenlet.switch() [ 633.818376] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.818376] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] result = function(*args, **kwargs) [ 633.818376] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 633.818376] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] return func(*args, **kwargs) [ 633.818376] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 633.818376] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] raise e [ 633.818376] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.818376] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] nwinfo = self.network_api.allocate_for_instance( [ 633.818376] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 633.818376] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] created_port_ids = self._update_ports_for_instance( [ 633.818376] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 633.818668] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] with excutils.save_and_reraise_exception(): [ 633.818668] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.818668] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] self.force_reraise() [ 633.818668] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.818668] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] raise self.value [ 633.818668] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 633.818668] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] updated_port = self._update_port( [ 633.818668] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.818668] 
env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] _ensure_no_port_binding_failure(port) [ 633.818668] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.818668] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] raise exception.PortBindingFailed(port_id=port['id']) [ 633.818668] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] nova.exception.PortBindingFailed: Binding failed for port f6bb382f-2bbc-4c88-933d-cdb3f0795d13, please check neutron logs for more information. [ 633.818668] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] [ 633.818951] env[62109]: INFO nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Terminating instance [ 633.820407] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquiring lock "refresh_cache-a05a3519-0395-4e49-b655-a6c6d7bd85a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.820533] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquired lock "refresh_cache-a05a3519-0395-4e49-b655-a6c6d7bd85a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.820689] env[62109]: DEBUG nova.network.neutron [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 634.024606] env[62109]: DEBUG nova.compute.manager [req-b80bfb12-1ada-4703-9007-da9f7281491a req-f1d7bee5-96d7-4e78-94fb-18bb89cd62bc service nova] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Received event network-changed-f6bb382f-2bbc-4c88-933d-cdb3f0795d13 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 634.026153] env[62109]: DEBUG nova.compute.manager [req-b80bfb12-1ada-4703-9007-da9f7281491a req-f1d7bee5-96d7-4e78-94fb-18bb89cd62bc service nova] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Refreshing instance network info cache due to event network-changed-f6bb382f-2bbc-4c88-933d-cdb3f0795d13. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 634.026153] env[62109]: DEBUG oslo_concurrency.lockutils [req-b80bfb12-1ada-4703-9007-da9f7281491a req-f1d7bee5-96d7-4e78-94fb-18bb89cd62bc service nova] Acquiring lock "refresh_cache-a05a3519-0395-4e49-b655-a6c6d7bd85a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.027129] env[62109]: DEBUG nova.network.neutron [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.237651] env[62109]: DEBUG nova.network.neutron [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.349109] env[62109]: DEBUG nova.network.neutron [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.573568] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46720b5d-49f2-496b-aee7-16a8bfd9ea4e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.583527] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1646862-2c32-4430-88fe-aed0de9b7a60 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.624893] env[62109]: DEBUG nova.network.neutron [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.627248] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7de1940-87ee-4c8e-80b4-aebde1a36a8c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.639239] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1c0d54-a403-4f0c-a08e-a7da053f912d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.653585] env[62109]: DEBUG nova.compute.provider_tree [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.739102] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Releasing lock "refresh_cache-58f76ca2-8f1b-4d9f-887b-1527ba70e91c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.739896] env[62109]: DEBUG nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 634.740506] env[62109]: DEBUG nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 634.740575] env[62109]: DEBUG nova.network.neutron [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 634.761397] env[62109]: DEBUG nova.network.neutron [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.058344] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "c90ace77-5b8b-4b04-aa57-d47ad17df01e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.058746] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "c90ace77-5b8b-4b04-aa57-d47ad17df01e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.093450] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "c44d618e-c781-47ba-b191-cecc01dcfe9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.093450] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "c44d618e-c781-47ba-b191-cecc01dcfe9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.134060] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Releasing lock "refresh_cache-a05a3519-0395-4e49-b655-a6c6d7bd85a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.134060] env[62109]: DEBUG nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 635.134060] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 635.134060] env[62109]: DEBUG oslo_concurrency.lockutils [req-b80bfb12-1ada-4703-9007-da9f7281491a req-f1d7bee5-96d7-4e78-94fb-18bb89cd62bc service nova] Acquired lock "refresh_cache-a05a3519-0395-4e49-b655-a6c6d7bd85a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.134060] env[62109]: DEBUG nova.network.neutron [req-b80bfb12-1ada-4703-9007-da9f7281491a req-f1d7bee5-96d7-4e78-94fb-18bb89cd62bc service nova] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Refreshing network info cache for port f6bb382f-2bbc-4c88-933d-cdb3f0795d13 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 635.134341] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36115621-ec4b-4dc9-bf3c-56440c44cc9f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.145830] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9c654f-d270-48f2-a55b-74e66b89dbd4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.160089] env[62109]: DEBUG nova.scheduler.client.report [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 635.176788] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a05a3519-0395-4e49-b655-a6c6d7bd85a9 could not be found. [ 635.177085] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 635.177305] env[62109]: INFO nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 635.177607] env[62109]: DEBUG oslo.service.loopingcall [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 635.177875] env[62109]: DEBUG nova.compute.manager [-] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 635.177985] env[62109]: DEBUG nova.network.neutron [-] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 635.203957] env[62109]: DEBUG nova.network.neutron [-] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.267846] env[62109]: DEBUG nova.network.neutron [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.431871] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Acquiring lock "7f40cdc8-3421-47b7-b148-ff6417105dbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.432317] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Lock "7f40cdc8-3421-47b7-b148-ff6417105dbb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.660777] env[62109]: DEBUG nova.network.neutron [req-b80bfb12-1ada-4703-9007-da9f7281491a req-f1d7bee5-96d7-4e78-94fb-18bb89cd62bc service nova] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.668194] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.190s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.670646] env[62109]: ERROR nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5fee0ba0-a98f-41d2-a74f-a40061ef22ab, please check neutron logs for more information. [ 635.670646] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Traceback (most recent call last): [ 635.670646] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 635.670646] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] self.driver.spawn(context, instance, image_meta, [ 635.670646] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 635.670646] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] self._vmops.spawn(context, instance, image_meta, injected_files, [ 635.670646] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 635.670646] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] vm_ref = self.build_virtual_machine(instance, [ 635.670646] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 635.670646] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] vif_infos = vmwarevif.get_vif_info(self._session, [ 635.670646] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 635.670945] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] for vif in network_info: [ 635.670945] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 635.670945] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] return self._sync_wrapper(fn, *args, **kwargs) [ 635.670945] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 635.670945] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] self.wait() [ 635.670945] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 635.670945] 
env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] self[:] = self._gt.wait() [ 635.670945] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 635.670945] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] return self._exit_event.wait() [ 635.670945] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 635.670945] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] result = hub.switch() [ 635.670945] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 635.670945] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] return self.greenlet.switch() [ 635.671251] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.671251] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] result = function(*args, **kwargs) [ 635.671251] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 635.671251] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] return func(*args, **kwargs) [ 635.671251] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 635.671251] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] raise e [ 635.671251] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.671251] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] nwinfo = self.network_api.allocate_for_instance( [ 635.671251] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 635.671251] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] created_port_ids = self._update_ports_for_instance( [ 635.671251] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 635.671251] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] with excutils.save_and_reraise_exception(): [ 635.671251] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.671532] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] self.force_reraise() [ 635.671532] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.671532] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] raise self.value [ 635.671532] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 635.671532] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] updated_port = self._update_port( [ 635.671532] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.671532] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] _ensure_no_port_binding_failure(port) [ 635.671532] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.671532] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] raise exception.PortBindingFailed(port_id=port['id']) [ 635.671532] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] nova.exception.PortBindingFailed: Binding failed for port 5fee0ba0-a98f-41d2-a74f-a40061ef22ab, please check neutron logs for more information. [ 635.671532] env[62109]: ERROR nova.compute.manager [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] [ 635.671833] env[62109]: DEBUG nova.compute.utils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Binding failed for port 5fee0ba0-a98f-41d2-a74f-a40061ef22ab, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 635.673214] env[62109]: DEBUG nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Build of instance 21efb09c-8d90-415c-815a-af6ce6707c97 was re-scheduled: Binding failed for port 5fee0ba0-a98f-41d2-a74f-a40061ef22ab, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 635.674061] env[62109]: DEBUG nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 635.674298] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Acquiring lock "refresh_cache-21efb09c-8d90-415c-815a-af6ce6707c97" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.674553] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Acquired lock "refresh_cache-21efb09c-8d90-415c-815a-af6ce6707c97" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.675217] env[62109]: DEBUG nova.network.neutron [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 635.676385] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.351s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.706391] env[62109]: DEBUG nova.network.neutron [-] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.770857] env[62109]: INFO nova.compute.manager [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] [instance: 58f76ca2-8f1b-4d9f-887b-1527ba70e91c] Took 1.03 seconds to deallocate network for instance. [ 635.805647] env[62109]: DEBUG nova.network.neutron [req-b80bfb12-1ada-4703-9007-da9f7281491a req-f1d7bee5-96d7-4e78-94fb-18bb89cd62bc service nova] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.203571] env[62109]: DEBUG nova.network.neutron [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 636.212097] env[62109]: INFO nova.compute.manager [-] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Took 1.03 seconds to deallocate network for instance. 
[ 636.215713] env[62109]: DEBUG nova.compute.claims [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 636.216207] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.289885] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "8b63f9a1-5639-48b2-b0a9-30380835bef2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.290111] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.310430] env[62109]: DEBUG oslo_concurrency.lockutils [req-b80bfb12-1ada-4703-9007-da9f7281491a req-f1d7bee5-96d7-4e78-94fb-18bb89cd62bc service nova] Releasing lock "refresh_cache-a05a3519-0395-4e49-b655-a6c6d7bd85a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.344926] env[62109]: DEBUG nova.network.neutron [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.399206] env[62109]: DEBUG nova.compute.manager [req-21eb661d-f091-453f-8290-d33a56e63314 req-230f6dfa-8961-4564-ba99-f7e9cfce648f service nova] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Received event network-vif-deleted-f6bb382f-2bbc-4c88-933d-cdb3f0795d13 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 636.570501] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "32cccd30-278c-48b6-8855-5cd76c2da057" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.570713] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "32cccd30-278c-48b6-8855-5cd76c2da057" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.647654] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2657a1-f5c9-4f0d-a2ae-db6ba81af31e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.655652] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54afc802-bd97-4be7-8431-1a0e09aec066 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.688465] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f631d6d-1a7c-4390-9581-f0a9e0236946 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.699648] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eccb308-5fc8-4f00-b380-4f66f0cc610c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.710748] env[62109]: DEBUG nova.compute.provider_tree [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.809067] env[62109]: INFO nova.scheduler.client.report [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Deleted allocations for instance 58f76ca2-8f1b-4d9f-887b-1527ba70e91c [ 636.848230] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Releasing lock "refresh_cache-21efb09c-8d90-415c-815a-af6ce6707c97" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.848537] env[62109]: DEBUG nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 636.849118] env[62109]: DEBUG nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 636.849118] env[62109]: DEBUG nova.network.neutron [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 636.871133] env[62109]: DEBUG nova.network.neutron [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 637.213389] env[62109]: DEBUG nova.scheduler.client.report [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 637.316191] env[62109]: DEBUG oslo_concurrency.lockutils [None req-381f3ddf-7aec-4f8c-92ec-062a4eabba8d tempest-ImagesOneServerNegativeTestJSON-890932015 tempest-ImagesOneServerNegativeTestJSON-890932015-project-member] Lock "58f76ca2-8f1b-4d9f-887b-1527ba70e91c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.817s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.376020] env[62109]: DEBUG nova.network.neutron [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.718573] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.042s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.720186] env[62109]: ERROR nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: 
a6d094c3-8488-4437-8972-aa246809a5b1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 022476a0-892b-4e81-afca-c01cc29d6f07, please check neutron logs for more information. [ 637.720186] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Traceback (most recent call last): [ 637.720186] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 637.720186] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] self.driver.spawn(context, instance, image_meta, [ 637.720186] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 637.720186] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 637.720186] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 637.720186] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] vm_ref = self.build_virtual_machine(instance, [ 637.720186] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 637.720186] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] vif_infos = vmwarevif.get_vif_info(self._session, [ 637.720186] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 637.720538] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] for vif in network_info: [ 637.720538] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 637.720538] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] return self._sync_wrapper(fn, *args, **kwargs) [ 637.720538] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 637.720538] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] self.wait() [ 637.720538] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 637.720538] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] self[:] = self._gt.wait() [ 637.720538] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 637.720538] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] return self._exit_event.wait() [ 637.720538] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 637.720538] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] 
current.throw(*self._exc) [ 637.720538] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 637.720538] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] result = function(*args, **kwargs) [ 637.720932] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 637.720932] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] return func(*args, **kwargs) [ 637.720932] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 637.720932] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] raise e [ 637.720932] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 637.720932] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] nwinfo = self.network_api.allocate_for_instance( [ 637.720932] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 637.720932] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] created_port_ids = self._update_ports_for_instance( [ 637.720932] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 637.720932] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] with excutils.save_and_reraise_exception(): [ 637.720932] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 637.720932] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] self.force_reraise() [ 637.720932] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 637.721390] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] raise self.value [ 637.721390] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 637.721390] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] updated_port = self._update_port( [ 637.721390] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 637.721390] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] _ensure_no_port_binding_failure(port) [ 637.721390] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 637.721390] env[62109]: ERROR nova.compute.manager [instance: 
a6d094c3-8488-4437-8972-aa246809a5b1] raise exception.PortBindingFailed(port_id=port['id']) [ 637.721390] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] nova.exception.PortBindingFailed: Binding failed for port 022476a0-892b-4e81-afca-c01cc29d6f07, please check neutron logs for more information. [ 637.721390] env[62109]: ERROR nova.compute.manager [instance: a6d094c3-8488-4437-8972-aa246809a5b1] [ 637.721390] env[62109]: DEBUG nova.compute.utils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Binding failed for port 022476a0-892b-4e81-afca-c01cc29d6f07, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 637.721621] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.745s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.722560] env[62109]: INFO nova.compute.claims [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.725164] env[62109]: DEBUG nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Build of instance a6d094c3-8488-4437-8972-aa246809a5b1 was re-scheduled: Binding failed for port 022476a0-892b-4e81-afca-c01cc29d6f07, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 637.725579] env[62109]: DEBUG nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 637.725800] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "refresh_cache-a6d094c3-8488-4437-8972-aa246809a5b1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.725946] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquired lock "refresh_cache-a6d094c3-8488-4437-8972-aa246809a5b1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.726115] env[62109]: DEBUG nova.network.neutron [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 637.819186] env[62109]: DEBUG nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 637.878523] env[62109]: INFO nova.compute.manager [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] [instance: 21efb09c-8d90-415c-815a-af6ce6707c97] Took 1.03 seconds to deallocate network for instance. [ 638.248177] env[62109]: DEBUG nova.network.neutron [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 638.340216] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.394077] env[62109]: DEBUG nova.network.neutron [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.900259] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Releasing lock "refresh_cache-a6d094c3-8488-4437-8972-aa246809a5b1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.900259] env[62109]: DEBUG nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 638.900259] env[62109]: DEBUG nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 638.900259] env[62109]: DEBUG nova.network.neutron [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 638.912898] env[62109]: INFO nova.scheduler.client.report [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Deleted allocations for instance 21efb09c-8d90-415c-815a-af6ce6707c97 [ 638.941744] env[62109]: DEBUG nova.network.neutron [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 639.226520] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b222310-2600-432d-8f8f-f8ef67de83be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.234755] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe4890a-d78f-48d9-9082-aa9da9cfaaa9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.264892] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a08186f-0139-49fa-9192-979571b20691 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.272081] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669c7520-9ada-4afa-9951-ec4e5ba68396 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.286756] env[62109]: DEBUG nova.compute.provider_tree [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.421291] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b04429d9-108d-40d3-9e72-cb31975d38fd tempest-AttachInterfacesUnderV243Test-849231851 tempest-AttachInterfacesUnderV243Test-849231851-project-member] Lock "21efb09c-8d90-415c-815a-af6ce6707c97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.937s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.443169] env[62109]: DEBUG nova.network.neutron [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.792017] env[62109]: DEBUG nova.scheduler.client.report [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 639.924184] env[62109]: DEBUG nova.compute.manager [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 639.947652] env[62109]: INFO nova.compute.manager [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: a6d094c3-8488-4437-8972-aa246809a5b1] Took 1.05 seconds to deallocate network for instance. [ 640.295913] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.574s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.296963] env[62109]: DEBUG nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 640.299942] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.023s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.449167] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.804280] env[62109]: DEBUG nova.compute.utils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 640.809840] env[62109]: DEBUG nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 640.810238] env[62109]: DEBUG nova.network.neutron [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 640.888111] env[62109]: DEBUG nova.policy [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64678c7edb1e449da933c3438fb88353', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de9406794e594260a4373c674ec12cf9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 640.981392] env[62109]: INFO nova.scheduler.client.report [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Deleted allocations for instance a6d094c3-8488-4437-8972-aa246809a5b1 [ 641.278565] env[62109]: DEBUG nova.network.neutron [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Successfully created port: f196c7a4-5f7e-40b5-a3d7-3eda1245c24e {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 641.310650] env[62109]: DEBUG nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 641.316114] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3e2b33-a09d-433e-845c-a37f8610e967 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.323484] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0b77ba-aaba-467f-9357-3167f5e87416 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.356229] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a7a347-fd12-4747-9a2a-99ee0e91cbb4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.364842] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a11a325-b982-411e-9fad-d253dc718536 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.382391] env[62109]: DEBUG nova.compute.provider_tree [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.501709] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c2567b-f309-4b4f-9b51-108fe20b00ab tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "a6d094c3-8488-4437-8972-aa246809a5b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.554s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.888055] env[62109]: DEBUG nova.scheduler.client.report [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 642.007571] env[62109]: DEBUG nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 642.325757] env[62109]: DEBUG nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 642.370873] env[62109]: DEBUG nova.compute.manager [req-220391f1-9214-4d56-b3f5-1080fba8fa50 req-c5e8ebb3-2086-45f3-8fc8-22c8492ce318 service nova] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Received event network-changed-f196c7a4-5f7e-40b5-a3d7-3eda1245c24e {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 642.372036] env[62109]: DEBUG nova.compute.manager [req-220391f1-9214-4d56-b3f5-1080fba8fa50 req-c5e8ebb3-2086-45f3-8fc8-22c8492ce318 service nova] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Refreshing instance network info cache due to event network-changed-f196c7a4-5f7e-40b5-a3d7-3eda1245c24e. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 642.372036] env[62109]: DEBUG oslo_concurrency.lockutils [req-220391f1-9214-4d56-b3f5-1080fba8fa50 req-c5e8ebb3-2086-45f3-8fc8-22c8492ce318 service nova] Acquiring lock "refresh_cache-e50019d2-d9a1-4077-ba1a-7f7bde266058" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.372036] env[62109]: DEBUG oslo_concurrency.lockutils [req-220391f1-9214-4d56-b3f5-1080fba8fa50 req-c5e8ebb3-2086-45f3-8fc8-22c8492ce318 service nova] Acquired lock "refresh_cache-e50019d2-d9a1-4077-ba1a-7f7bde266058" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.372036] env[62109]: DEBUG nova.network.neutron [req-220391f1-9214-4d56-b3f5-1080fba8fa50 req-c5e8ebb3-2086-45f3-8fc8-22c8492ce318 service nova] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Refreshing network info cache for port f196c7a4-5f7e-40b5-a3d7-3eda1245c24e {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 642.377610] env[62109]: DEBUG nova.virt.hardware [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 642.377876] env[62109]: DEBUG nova.virt.hardware [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 642.378083] env[62109]: DEBUG nova.virt.hardware [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 642.378262] env[62109]: DEBUG nova.virt.hardware [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 642.378443] env[62109]: DEBUG nova.virt.hardware [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 642.378648] env[62109]: DEBUG nova.virt.hardware [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 642.379637] env[62109]: DEBUG nova.virt.hardware [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 642.379637] env[62109]: DEBUG nova.virt.hardware [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 642.379637] env[62109]: DEBUG nova.virt.hardware [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 642.379637] env[62109]: DEBUG nova.virt.hardware [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 642.379637] env[62109]: DEBUG nova.virt.hardware [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 642.380680] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2446e3-ee86-4728-9b0c-c10555a27ced {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.393352] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57c3584-0479-47aa-a64e-6a83be33be25 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.397895] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 
tempest-ServersAdminTestJSON-285907221-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.098s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.398759] env[62109]: ERROR nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c0d41346-4c78-4c41-b1d3-ee4114fc60c7, please check neutron logs for more information. [ 642.398759] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Traceback (most recent call last): [ 642.398759] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 642.398759] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] self.driver.spawn(context, instance, image_meta, [ 642.398759] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 642.398759] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 642.398759] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 642.398759] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] vm_ref = self.build_virtual_machine(instance, [ 642.398759] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 642.398759] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] vif_infos = vmwarevif.get_vif_info(self._session, [ 642.398759] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 642.399132] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] for vif in network_info: [ 642.399132] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 642.399132] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] return self._sync_wrapper(fn, *args, **kwargs) [ 642.399132] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 642.399132] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] self.wait() [ 642.399132] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 642.399132] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] self[:] = self._gt.wait() [ 642.399132] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 642.399132] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] return self._exit_event.wait() [ 642.399132] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 642.399132] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] current.throw(*self._exc) [ 642.399132] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 642.399132] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] result = function(*args, **kwargs) [ 642.399510] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 642.399510] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] return func(*args, **kwargs) [ 642.399510] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 642.399510] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] raise e [ 642.399510] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.399510] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] nwinfo = self.network_api.allocate_for_instance( [ 642.399510] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 642.399510] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] created_port_ids = self._update_ports_for_instance( [ 642.399510] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 642.399510] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] with excutils.save_and_reraise_exception(): [ 642.399510] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.399510] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] self.force_reraise() [ 642.399510] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.399853] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] raise self.value [ 642.399853] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 642.399853] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] updated_port = self._update_port( [ 642.399853] env[62109]: ERROR nova.compute.manager [instance: 
693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.399853] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] _ensure_no_port_binding_failure(port) [ 642.399853] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.399853] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] raise exception.PortBindingFailed(port_id=port['id']) [ 642.399853] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] nova.exception.PortBindingFailed: Binding failed for port c0d41346-4c78-4c41-b1d3-ee4114fc60c7, please check neutron logs for more information. [ 642.399853] env[62109]: ERROR nova.compute.manager [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] [ 642.399853] env[62109]: DEBUG nova.compute.utils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Binding failed for port c0d41346-4c78-4c41-b1d3-ee4114fc60c7, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 642.400480] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.557s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.402282] env[62109]: INFO nova.compute.claims [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.405548] env[62109]: DEBUG nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Build of instance 693e6fa3-4d5f-47aa-8543-32f21001b78f was re-scheduled: Binding failed for port c0d41346-4c78-4c41-b1d3-ee4114fc60c7, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 642.406017] env[62109]: DEBUG nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 642.406269] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquiring lock "refresh_cache-693e6fa3-4d5f-47aa-8543-32f21001b78f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.406423] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquired lock "refresh_cache-693e6fa3-4d5f-47aa-8543-32f21001b78f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.406656] env[62109]: DEBUG nova.network.neutron [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 642.419146] env[62109]: DEBUG nova.network.neutron [req-220391f1-9214-4d56-b3f5-1080fba8fa50 req-c5e8ebb3-2086-45f3-8fc8-22c8492ce318 service nova] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 642.486342] env[62109]: DEBUG nova.network.neutron [req-220391f1-9214-4d56-b3f5-1080fba8fa50 req-c5e8ebb3-2086-45f3-8fc8-22c8492ce318 service nova] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.533179] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.549830] env[62109]: ERROR nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f196c7a4-5f7e-40b5-a3d7-3eda1245c24e, please check neutron logs for more information. 
[ 642.549830] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 642.549830] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.549830] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 642.549830] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 642.549830] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 642.549830] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 642.549830] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 642.549830] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.549830] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 642.549830] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.549830] env[62109]: ERROR nova.compute.manager raise self.value [ 642.549830] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 642.549830] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 642.549830] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.549830] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 642.550234] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.550234] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 642.550234] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f196c7a4-5f7e-40b5-a3d7-3eda1245c24e, please check neutron logs for more information. 
[ 642.550234] env[62109]: ERROR nova.compute.manager [ 642.550234] env[62109]: Traceback (most recent call last): [ 642.550234] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 642.550234] env[62109]: listener.cb(fileno) [ 642.550234] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 642.550234] env[62109]: result = function(*args, **kwargs) [ 642.550234] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 642.550234] env[62109]: return func(*args, **kwargs) [ 642.550234] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 642.550234] env[62109]: raise e [ 642.550234] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.550234] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 642.550234] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 642.550234] env[62109]: created_port_ids = self._update_ports_for_instance( [ 642.550234] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 642.550234] env[62109]: with excutils.save_and_reraise_exception(): [ 642.550234] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.550234] env[62109]: self.force_reraise() [ 642.550234] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.550234] env[62109]: raise self.value [ 642.550234] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 642.550234] env[62109]: updated_port = self._update_port( [ 642.550234] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.550234] env[62109]: _ensure_no_port_binding_failure(port) [ 642.550234] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.550234] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 642.550911] env[62109]: nova.exception.PortBindingFailed: Binding failed for port f196c7a4-5f7e-40b5-a3d7-3eda1245c24e, please check neutron logs for more information. [ 642.550911] env[62109]: Removing descriptor: 15 [ 642.550911] env[62109]: ERROR nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f196c7a4-5f7e-40b5-a3d7-3eda1245c24e, please check neutron logs for more information. 
[ 642.550911] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Traceback (most recent call last): [ 642.550911] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 642.550911] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] yield resources [ 642.550911] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 642.550911] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] self.driver.spawn(context, instance, image_meta, [ 642.550911] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 642.550911] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] self._vmops.spawn(context, instance, image_meta, injected_files, [ 642.550911] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 642.550911] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] vm_ref = self.build_virtual_machine(instance, [ 642.551234] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 642.551234] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] vif_infos = vmwarevif.get_vif_info(self._session, [ 642.551234] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 642.551234] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] for vif in network_info: [ 642.551234] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 642.551234] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] return self._sync_wrapper(fn, *args, **kwargs) [ 642.551234] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 642.551234] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] self.wait() [ 642.551234] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 642.551234] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] self[:] = self._gt.wait() [ 642.551234] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 642.551234] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] return self._exit_event.wait() [ 642.551234] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 642.551550] env[62109]: ERROR 
nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] result = hub.switch() [ 642.551550] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 642.551550] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] return self.greenlet.switch() [ 642.551550] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 642.551550] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] result = function(*args, **kwargs) [ 642.551550] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 642.551550] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] return func(*args, **kwargs) [ 642.551550] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 642.551550] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] raise e [ 642.551550] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.551550] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] nwinfo = self.network_api.allocate_for_instance( [ 642.551550] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 642.551550] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] created_port_ids = self._update_ports_for_instance( [ 642.551845] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 642.551845] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] with excutils.save_and_reraise_exception(): [ 642.551845] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.551845] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] self.force_reraise() [ 642.551845] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.551845] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] raise self.value [ 642.551845] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 642.551845] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] updated_port = self._update_port( [ 642.551845] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.551845] 
env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] _ensure_no_port_binding_failure(port) [ 642.551845] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.551845] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] raise exception.PortBindingFailed(port_id=port['id']) [ 642.552115] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] nova.exception.PortBindingFailed: Binding failed for port f196c7a4-5f7e-40b5-a3d7-3eda1245c24e, please check neutron logs for more information. [ 642.552115] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] [ 642.552115] env[62109]: INFO nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Terminating instance [ 642.552115] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquiring lock "refresh_cache-e50019d2-d9a1-4077-ba1a-7f7bde266058" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.929951] env[62109]: DEBUG nova.network.neutron [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 642.990830] env[62109]: DEBUG oslo_concurrency.lockutils [req-220391f1-9214-4d56-b3f5-1080fba8fa50 req-c5e8ebb3-2086-45f3-8fc8-22c8492ce318 service nova] Releasing lock "refresh_cache-e50019d2-d9a1-4077-ba1a-7f7bde266058" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.990830] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquired lock "refresh_cache-e50019d2-d9a1-4077-ba1a-7f7bde266058" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.990830] env[62109]: DEBUG nova.network.neutron [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 643.051398] env[62109]: DEBUG nova.network.neutron [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.510298] env[62109]: DEBUG nova.network.neutron [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] 
[instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.557210] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Releasing lock "refresh_cache-693e6fa3-4d5f-47aa-8543-32f21001b78f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.557449] env[62109]: DEBUG nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 643.557625] env[62109]: DEBUG nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 643.557791] env[62109]: DEBUG nova.network.neutron [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 643.566831] env[62109]: DEBUG nova.network.neutron [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.577028] env[62109]: DEBUG nova.network.neutron [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.870577] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9908cb5-577d-4c6c-b6f5-a924c5bd87dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.877948] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7780a604-c5ef-47b9-860c-81d213dc150d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.910756] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a409be-3c25-4e36-bd47-9f65b7bd063a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.918045] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e99193-e52e-4512-8b90-ecb81bb2d7f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.931020] env[62109]: DEBUG nova.compute.provider_tree [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.070591] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Releasing lock "refresh_cache-e50019d2-d9a1-4077-ba1a-7f7bde266058" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.071027] env[62109]: DEBUG nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 644.071226] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 644.071510] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fe05b839-9679-43bf-9c53-58f0f4eaa024 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.078015] env[62109]: DEBUG nova.network.neutron [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.081536] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb75fb8e-ea2c-492a-b54b-57e12912bb66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.104704] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e50019d2-d9a1-4077-ba1a-7f7bde266058 could not be found. [ 644.104950] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 644.105122] env[62109]: INFO nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Took 0.03 seconds to destroy the instance on the hypervisor. [ 644.105358] env[62109]: DEBUG oslo.service.loopingcall [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 644.105548] env[62109]: DEBUG nova.compute.manager [-] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 644.105661] env[62109]: DEBUG nova.network.neutron [-] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 644.120227] env[62109]: DEBUG nova.network.neutron [-] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.431360] env[62109]: DEBUG nova.compute.manager [req-f110d1c7-3088-4eee-a7d4-898d2ddfaad1 req-1d5d96ab-c029-46e9-bcf3-00d7d3c5a586 service nova] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Received event network-vif-deleted-f196c7a4-5f7e-40b5-a3d7-3eda1245c24e {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 644.433611] env[62109]: DEBUG nova.scheduler.client.report [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 644.593919] env[62109]: INFO nova.compute.manager [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: 693e6fa3-4d5f-47aa-8543-32f21001b78f] Took 1.04 seconds to deallocate network for instance. [ 644.622530] env[62109]: DEBUG nova.network.neutron [-] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.938789] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.538s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.939591] env[62109]: DEBUG nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 644.944101] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.987s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.945547] env[62109]: INFO nova.compute.claims [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.131547] env[62109]: INFO nova.compute.manager [-] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Took 1.03 seconds to deallocate network for instance. 
[ 645.134119] env[62109]: DEBUG nova.compute.claims [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 645.134296] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.453114] env[62109]: DEBUG nova.compute.utils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 645.456868] env[62109]: DEBUG nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 645.457052] env[62109]: DEBUG nova.network.neutron [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 645.514886] env[62109]: DEBUG nova.policy [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2988618e18934aa6b85d2ea288917ad3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '275238e3083540aa838de6d5cccf61eb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 645.630575] env[62109]: INFO nova.scheduler.client.report [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Deleted allocations for instance 693e6fa3-4d5f-47aa-8543-32f21001b78f [ 645.813621] env[62109]: DEBUG nova.network.neutron [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Successfully created port: a4fdec54-dd81-4e7f-a2a1-876cd3012614 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 645.958232] env[62109]: DEBUG nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 646.147283] env[62109]: DEBUG oslo_concurrency.lockutils [None req-126ce572-4385-4579-b747-bfb695c9f616 tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Lock "693e6fa3-4d5f-47aa-8543-32f21001b78f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.470s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.246720] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 646.246940] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 646.495341] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21aff045-f653-479a-b5ed-4249f957f1cd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.505751] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1826ce59-50b9-44cd-8d7f-de0a379302e0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.547151] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d090c3-88ec-42cb-bdf3-a00f4ba9c638 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.558473] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4e4601-601e-4168-b3ec-9b15a92832bc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.573727] env[62109]: DEBUG nova.compute.provider_tree [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.650847] env[62109]: DEBUG nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 646.754446] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 646.754446] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 646.754446] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Rebuilding the list of instances to heal {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 646.877099] env[62109]: DEBUG nova.compute.manager [req-f5c633b7-a436-4ae3-a666-74c4a3f54b7f req-87e14b11-7513-49fa-beb9-0bb9bd8fc7e0 service nova] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Received event network-changed-a4fdec54-dd81-4e7f-a2a1-876cd3012614 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 646.877886] env[62109]: DEBUG nova.compute.manager [req-f5c633b7-a436-4ae3-a666-74c4a3f54b7f req-87e14b11-7513-49fa-beb9-0bb9bd8fc7e0 service nova] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Refreshing instance network info cache due to event network-changed-a4fdec54-dd81-4e7f-a2a1-876cd3012614. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 646.877886] env[62109]: DEBUG oslo_concurrency.lockutils [req-f5c633b7-a436-4ae3-a666-74c4a3f54b7f req-87e14b11-7513-49fa-beb9-0bb9bd8fc7e0 service nova] Acquiring lock "refresh_cache-590e6f9a-b764-44b4-9117-3deff696a6aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.877886] env[62109]: DEBUG oslo_concurrency.lockutils [req-f5c633b7-a436-4ae3-a666-74c4a3f54b7f req-87e14b11-7513-49fa-beb9-0bb9bd8fc7e0 service nova] Acquired lock "refresh_cache-590e6f9a-b764-44b4-9117-3deff696a6aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.878109] env[62109]: DEBUG nova.network.neutron [req-f5c633b7-a436-4ae3-a666-74c4a3f54b7f req-87e14b11-7513-49fa-beb9-0bb9bd8fc7e0 service nova] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Refreshing network info cache for port a4fdec54-dd81-4e7f-a2a1-876cd3012614 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 646.968011] env[62109]: DEBUG nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 647.007551] env[62109]: DEBUG nova.virt.hardware [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 647.008689] env[62109]: DEBUG nova.virt.hardware [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 647.008689] env[62109]: DEBUG nova.virt.hardware [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.008689] env[62109]: DEBUG nova.virt.hardware [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 647.008689] env[62109]: DEBUG nova.virt.hardware [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.008689] env[62109]: DEBUG nova.virt.hardware [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 647.008894] env[62109]: DEBUG nova.virt.hardware [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 647.008923] env[62109]: DEBUG nova.virt.hardware [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 647.009111] env[62109]: DEBUG nova.virt.hardware [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] 
Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 647.009280] env[62109]: DEBUG nova.virt.hardware [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 647.009446] env[62109]: DEBUG nova.virt.hardware [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 647.010847] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0f9252-4062-4c9d-9c90-2c6913abdee0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.019365] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f91f8a-42d7-4f13-ad08-75abe240a914 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.078097] env[62109]: DEBUG nova.scheduler.client.report [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 647.087278] env[62109]: ERROR nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a4fdec54-dd81-4e7f-a2a1-876cd3012614, please check neutron logs for more information. 
[ 647.087278] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 647.087278] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 647.087278] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 647.087278] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 647.087278] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 647.087278] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 647.087278] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 647.087278] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.087278] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 647.087278] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.087278] env[62109]: ERROR nova.compute.manager raise self.value [ 647.087278] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 647.087278] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 647.087278] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.087278] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 647.088027] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.088027] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 647.088027] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a4fdec54-dd81-4e7f-a2a1-876cd3012614, please check neutron logs for more information. 
[ 647.088027] env[62109]: ERROR nova.compute.manager [ 647.088027] env[62109]: Traceback (most recent call last): [ 647.088027] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 647.088027] env[62109]: listener.cb(fileno) [ 647.088027] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 647.088027] env[62109]: result = function(*args, **kwargs) [ 647.088027] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 647.088027] env[62109]: return func(*args, **kwargs) [ 647.088027] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 647.088027] env[62109]: raise e [ 647.088027] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 647.088027] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 647.088027] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 647.088027] env[62109]: created_port_ids = self._update_ports_for_instance( [ 647.088027] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 647.088027] env[62109]: with excutils.save_and_reraise_exception(): [ 647.088027] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.088027] env[62109]: self.force_reraise() [ 647.088027] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.088027] env[62109]: raise self.value [ 647.088027] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 647.088027] env[62109]: updated_port = self._update_port( [ 647.088027] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.088027] env[62109]: _ensure_no_port_binding_failure(port) [ 647.088027] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.088027] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 647.089847] env[62109]: nova.exception.PortBindingFailed: Binding failed for port a4fdec54-dd81-4e7f-a2a1-876cd3012614, please check neutron logs for more information. [ 647.089847] env[62109]: Removing descriptor: 18 [ 647.089847] env[62109]: ERROR nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a4fdec54-dd81-4e7f-a2a1-876cd3012614, please check neutron logs for more information. 
[ 647.089847] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Traceback (most recent call last): [ 647.089847] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 647.089847] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] yield resources [ 647.089847] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 647.089847] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] self.driver.spawn(context, instance, image_meta, [ 647.089847] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 647.089847] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 647.089847] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 647.089847] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] vm_ref = self.build_virtual_machine(instance, [ 647.090480] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 647.090480] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] vif_infos = vmwarevif.get_vif_info(self._session, [ 647.090480] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 647.090480] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] for vif in network_info: [ 647.090480] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 647.090480] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] return self._sync_wrapper(fn, *args, **kwargs) [ 647.090480] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 647.090480] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] self.wait() [ 647.090480] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 647.090480] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] self[:] = self._gt.wait() [ 647.090480] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 647.090480] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] return self._exit_event.wait() [ 647.090480] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 647.091189] env[62109]: ERROR 
nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] result = hub.switch() [ 647.091189] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 647.091189] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] return self.greenlet.switch() [ 647.091189] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 647.091189] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] result = function(*args, **kwargs) [ 647.091189] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 647.091189] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] return func(*args, **kwargs) [ 647.091189] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 647.091189] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] raise e [ 647.091189] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 647.091189] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] nwinfo = self.network_api.allocate_for_instance( [ 647.091189] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 647.091189] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] created_port_ids = self._update_ports_for_instance( [ 647.091801] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 647.091801] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] with excutils.save_and_reraise_exception(): [ 647.091801] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.091801] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] self.force_reraise() [ 647.091801] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.091801] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] raise self.value [ 647.091801] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 647.091801] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] updated_port = self._update_port( [ 647.091801] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.091801] 
env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] _ensure_no_port_binding_failure(port) [ 647.091801] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.091801] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] raise exception.PortBindingFailed(port_id=port['id']) [ 647.092308] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] nova.exception.PortBindingFailed: Binding failed for port a4fdec54-dd81-4e7f-a2a1-876cd3012614, please check neutron logs for more information. [ 647.092308] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] [ 647.092308] env[62109]: INFO nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Terminating instance [ 647.092308] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "refresh_cache-590e6f9a-b764-44b4-9117-3deff696a6aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.185222] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.257600] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 647.257778] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 647.257921] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 647.258069] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 647.258196] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Skipping network cache update for instance because it is Building. 
{{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 647.258341] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Didn't find any instances for network info cache update. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 647.258551] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.258720] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.258881] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.260335] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.260335] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.260335] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.260335] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 647.260335] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.398453] env[62109]: DEBUG nova.network.neutron [req-f5c633b7-a436-4ae3-a666-74c4a3f54b7f req-87e14b11-7513-49fa-beb9-0bb9bd8fc7e0 service nova] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 647.496548] env[62109]: DEBUG nova.network.neutron [req-f5c633b7-a436-4ae3-a666-74c4a3f54b7f req-87e14b11-7513-49fa-beb9-0bb9bd8fc7e0 service nova] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.582704] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.638s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.583269] env[62109]: DEBUG nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 647.586068] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.886s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.763778] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.001289] env[62109]: DEBUG oslo_concurrency.lockutils [req-f5c633b7-a436-4ae3-a666-74c4a3f54b7f req-87e14b11-7513-49fa-beb9-0bb9bd8fc7e0 service nova] Releasing lock "refresh_cache-590e6f9a-b764-44b4-9117-3deff696a6aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.001994] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "refresh_cache-590e6f9a-b764-44b4-9117-3deff696a6aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.001994] env[62109]: DEBUG nova.network.neutron [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 648.092370] env[62109]: DEBUG nova.compute.utils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 648.096026] env[62109]: DEBUG nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] 
[instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 648.096617] env[62109]: DEBUG nova.network.neutron [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 648.141956] env[62109]: DEBUG nova.policy [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d8b4a13b12d477ebd973d90ec11f62d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f94fd7a82dc0489597534c518365971b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 648.420588] env[62109]: DEBUG nova.network.neutron [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Successfully created port: 49919bfd-f918-4e38-ba86-940a0ce0728a {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 648.520569] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8065ce-b229-46af-a71b-7119db3413c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.528357] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49bc11af-28e8-4d22-8502-ee47ba83a424 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.569633] env[62109]: DEBUG nova.network.neutron [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 648.572140] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ee370f-33a8-487a-9d06-8f6a7ca70fa0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.580732] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb808304-d17f-43d2-8456-d5c51368fbcf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.213729] env[62109]: DEBUG nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 649.221215] env[62109]: DEBUG nova.compute.manager [req-5ab5361c-2225-44b0-8288-a1a71e9f1edd req-32b4c014-ecdc-4169-b852-9af09f217a44 service nova] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Received event network-vif-deleted-a4fdec54-dd81-4e7f-a2a1-876cd3012614 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 649.230397] env[62109]: DEBUG nova.compute.provider_tree [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.285531] env[62109]: DEBUG nova.network.neutron [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.445376] env[62109]: ERROR nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 49919bfd-f918-4e38-ba86-940a0ce0728a, please check neutron logs for more information. [ 649.445376] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 649.445376] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 649.445376] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 649.445376] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 649.445376] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 649.445376] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 649.445376] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 649.445376] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.445376] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 649.445376] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.445376] env[62109]: ERROR nova.compute.manager raise self.value [ 649.445376] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 649.445376] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 649.445376] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.445376] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 649.445874] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 649.445874] env[62109]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 649.445874] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 49919bfd-f918-4e38-ba86-940a0ce0728a, please check neutron logs for more information. [ 649.445874] env[62109]: ERROR nova.compute.manager [ 649.445874] env[62109]: Traceback (most recent call last): [ 649.445874] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 649.445874] env[62109]: listener.cb(fileno) [ 649.445874] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 649.445874] env[62109]: result = function(*args, **kwargs) [ 649.445874] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 649.445874] env[62109]: return func(*args, **kwargs) [ 649.445874] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 649.445874] env[62109]: raise e [ 649.445874] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 649.445874] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 649.445874] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 649.445874] env[62109]: created_port_ids = self._update_ports_for_instance( [ 649.445874] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 649.445874] env[62109]: with excutils.save_and_reraise_exception(): [ 649.445874] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.445874] env[62109]: self.force_reraise() [ 649.445874] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.445874] env[62109]: raise self.value [ 649.445874] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 649.445874] env[62109]: updated_port = self._update_port( [ 649.445874] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.445874] env[62109]: _ensure_no_port_binding_failure(port) [ 649.445874] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 649.445874] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 649.446677] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 49919bfd-f918-4e38-ba86-940a0ce0728a, please check neutron logs for more information. 
[ 649.446677] env[62109]: Removing descriptor: 18 [ 649.733051] env[62109]: DEBUG nova.scheduler.client.report [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 649.788535] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "refresh_cache-590e6f9a-b764-44b4-9117-3deff696a6aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.788978] env[62109]: DEBUG nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 649.789179] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 649.789444] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f67dbb5-f3cc-4005-b409-0d1c41bf8700 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.798029] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef38fc0-ffae-4612-a38c-f66f59d2619e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.819279] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 590e6f9a-b764-44b4-9117-3deff696a6aa could not be found. [ 649.819525] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 649.819714] env[62109]: INFO nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 649.820118] env[62109]: DEBUG oslo.service.loopingcall [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 649.820322] env[62109]: DEBUG nova.compute.manager [-] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 649.820453] env[62109]: DEBUG nova.network.neutron [-] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 649.835753] env[62109]: DEBUG nova.network.neutron [-] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.226996] env[62109]: DEBUG nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 650.238244] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.652s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.238954] env[62109]: ERROR nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8051c2fe-1a10-4aca-8678-f30fd380fe0e, please check neutron logs for more information. 
[ 650.238954] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Traceback (most recent call last): [ 650.238954] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 650.238954] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] self.driver.spawn(context, instance, image_meta, [ 650.238954] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 650.238954] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 650.238954] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 650.238954] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] vm_ref = self.build_virtual_machine(instance, [ 650.238954] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 650.238954] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] vif_infos = vmwarevif.get_vif_info(self._session, [ 650.238954] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 650.239295] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] for vif in network_info: [ 650.239295] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 650.239295] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] return self._sync_wrapper(fn, *args, **kwargs) [ 650.239295] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 650.239295] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] self.wait() [ 650.239295] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 650.239295] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] self[:] = self._gt.wait() [ 650.239295] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 650.239295] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] return self._exit_event.wait() [ 650.239295] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 650.239295] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] result = hub.switch() [ 650.239295] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
650.239295] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] return self.greenlet.switch() [ 650.239599] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 650.239599] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] result = function(*args, **kwargs) [ 650.239599] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 650.239599] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] return func(*args, **kwargs) [ 650.239599] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 650.239599] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] raise e [ 650.239599] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.239599] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] nwinfo = self.network_api.allocate_for_instance( [ 650.239599] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 650.239599] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] created_port_ids = self._update_ports_for_instance( [ 650.239599] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 650.239599] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] with excutils.save_and_reraise_exception(): [ 650.239599] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.239891] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] self.force_reraise() [ 650.239891] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.239891] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] raise self.value [ 650.239891] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 650.239891] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] updated_port = self._update_port( [ 650.239891] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.239891] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] _ensure_no_port_binding_failure(port) [ 650.239891] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 650.239891] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] raise exception.PortBindingFailed(port_id=port['id']) [ 650.239891] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] nova.exception.PortBindingFailed: Binding failed for port 8051c2fe-1a10-4aca-8678-f30fd380fe0e, please check neutron logs for more information. [ 650.239891] env[62109]: ERROR nova.compute.manager [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] [ 650.240197] env[62109]: DEBUG nova.compute.utils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Binding failed for port 8051c2fe-1a10-4aca-8678-f30fd380fe0e, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 650.241405] env[62109]: DEBUG nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Build of instance b60d334c-0834-4267-bb31-1f3c679a2e1d was re-scheduled: Binding failed for port 8051c2fe-1a10-4aca-8678-f30fd380fe0e, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 650.241900] env[62109]: DEBUG nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 650.242194] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquiring lock "refresh_cache-b60d334c-0834-4267-bb31-1f3c679a2e1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.242386] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Acquired lock "refresh_cache-b60d334c-0834-4267-bb31-1f3c679a2e1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.242518] env[62109]: DEBUG nova.network.neutron [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 650.246360] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.690s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.247955] env[62109]: INFO nova.compute.claims [None 
req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.259527] env[62109]: DEBUG nova.virt.hardware [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 650.259822] env[62109]: DEBUG nova.virt.hardware [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 650.260061] env[62109]: DEBUG nova.virt.hardware [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 650.260270] env[62109]: DEBUG nova.virt.hardware [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 650.260420] env[62109]: DEBUG nova.virt.hardware [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 650.260568] env[62109]: DEBUG nova.virt.hardware [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 650.260774] env[62109]: DEBUG nova.virt.hardware [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 650.260946] env[62109]: DEBUG nova.virt.hardware [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 650.261233] env[62109]: DEBUG nova.virt.hardware [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 650.261374] env[62109]: DEBUG nova.virt.hardware [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 650.261553] env[62109]: DEBUG nova.virt.hardware [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 650.262694] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0eaf48-30fc-479f-afe9-23a71ba87125 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.271908] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842da528-7f8c-48bd-bfc2-ba1e46cd4f7f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.286686] env[62109]: ERROR nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 49919bfd-f918-4e38-ba86-940a0ce0728a, please check neutron logs for more information. 
[ 650.286686] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Traceback (most recent call last): [ 650.286686] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 650.286686] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] yield resources [ 650.286686] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 650.286686] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] self.driver.spawn(context, instance, image_meta, [ 650.286686] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 650.286686] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 650.286686] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 650.286686] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] vm_ref = self.build_virtual_machine(instance, [ 650.286686] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 650.287201] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] vif_infos = vmwarevif.get_vif_info(self._session, [ 650.287201] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 650.287201] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] for vif in network_info: [ 650.287201] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 650.287201] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] return self._sync_wrapper(fn, *args, **kwargs) [ 650.287201] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 650.287201] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] self.wait() [ 650.287201] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 650.287201] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] self[:] = self._gt.wait() [ 650.287201] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 650.287201] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] return self._exit_event.wait() [ 650.287201] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 650.287201] env[62109]: ERROR 
nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] current.throw(*self._exc) [ 650.287529] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 650.287529] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] result = function(*args, **kwargs) [ 650.287529] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 650.287529] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] return func(*args, **kwargs) [ 650.287529] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 650.287529] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] raise e [ 650.287529] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.287529] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] nwinfo = self.network_api.allocate_for_instance( [ 650.287529] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 650.287529] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] created_port_ids = self._update_ports_for_instance( [ 650.287529] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 650.287529] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] with excutils.save_and_reraise_exception(): [ 650.287529] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.287825] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] self.force_reraise() [ 650.287825] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.287825] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] raise self.value [ 650.287825] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 650.287825] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] updated_port = self._update_port( [ 650.287825] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.287825] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] _ensure_no_port_binding_failure(port) [ 650.287825] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
650.287825] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] raise exception.PortBindingFailed(port_id=port['id']) [ 650.287825] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] nova.exception.PortBindingFailed: Binding failed for port 49919bfd-f918-4e38-ba86-940a0ce0728a, please check neutron logs for more information. [ 650.287825] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] [ 650.287825] env[62109]: INFO nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Terminating instance [ 650.289976] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "refresh_cache-226938f5-f903-4671-b7a3-c6f5a264506e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.290283] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "refresh_cache-226938f5-f903-4671-b7a3-c6f5a264506e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.290387] env[62109]: DEBUG nova.network.neutron [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 650.339157] env[62109]: DEBUG nova.network.neutron [-] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.770529] env[62109]: DEBUG nova.network.neutron [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.807694] env[62109]: DEBUG nova.network.neutron [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.842549] env[62109]: INFO nova.compute.manager [-] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Took 1.02 seconds to deallocate network for instance. 
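The spawn failure traced above bottoms out in the _ensure_no_port_binding_failure check in nova/network/neutron.py: Neutron reports the outcome of port binding in the port's binding:vif_type attribute, and a value of binding_failed is turned into the PortBindingFailed exception the compute manager logs. The following is a minimal standalone sketch of that check, not Nova's exact code; the helper and class names here are illustrative, and the only assumption is that the port dict is shaped like a Neutron API port response.

# Illustrative sketch, not Nova's implementation.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)
        self.port_id = port_id

def ensure_no_port_binding_failure(port):
    # 'port' is a dict shaped like a Neutron API port (GET /v2.0/ports/<uuid>).
    # Neutron sets binding:vif_type to 'binding_failed' when no mechanism
    # driver could bind the port on the requested host.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

if __name__ == '__main__':
    failed_port = {'id': '49919bfd-f918-4e38-ba86-940a0ce0728a',
                   'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)  # Binding failed for port 49919bfd-..., please check neutron logs ...

When this check fires, the build is aborted and the compute manager tears down what was provisioned, which is the "Terminating instance", destroy, and network-deallocation sequence recorded in the entries that follow.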
[ 650.846736] env[62109]: DEBUG nova.compute.claims [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 650.846736] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.848180] env[62109]: DEBUG nova.network.neutron [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.916875] env[62109]: DEBUG nova.network.neutron [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.950802] env[62109]: DEBUG nova.compute.manager [req-e49f3904-23e4-4594-b4a5-80938d3b960f req-a78da161-d762-41f1-a2c3-dd19acfa09c2 service nova] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Received event network-changed-49919bfd-f918-4e38-ba86-940a0ce0728a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 650.951010] env[62109]: DEBUG nova.compute.manager [req-e49f3904-23e4-4594-b4a5-80938d3b960f req-a78da161-d762-41f1-a2c3-dd19acfa09c2 service nova] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Refreshing instance network info cache due to event network-changed-49919bfd-f918-4e38-ba86-940a0ce0728a. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 650.951202] env[62109]: DEBUG oslo_concurrency.lockutils [req-e49f3904-23e4-4594-b4a5-80938d3b960f req-a78da161-d762-41f1-a2c3-dd19acfa09c2 service nova] Acquiring lock "refresh_cache-226938f5-f903-4671-b7a3-c6f5a264506e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.350867] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Releasing lock "refresh_cache-b60d334c-0834-4267-bb31-1f3c679a2e1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.351137] env[62109]: DEBUG nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 651.351213] env[62109]: DEBUG nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 651.351461] env[62109]: DEBUG nova.network.neutron [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 651.380534] env[62109]: DEBUG nova.network.neutron [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 651.419617] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "refresh_cache-226938f5-f903-4671-b7a3-c6f5a264506e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.420067] env[62109]: DEBUG nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 651.420288] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 651.423342] env[62109]: DEBUG oslo_concurrency.lockutils [req-e49f3904-23e4-4594-b4a5-80938d3b960f req-a78da161-d762-41f1-a2c3-dd19acfa09c2 service nova] Acquired lock "refresh_cache-226938f5-f903-4671-b7a3-c6f5a264506e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.423533] env[62109]: DEBUG nova.network.neutron [req-e49f3904-23e4-4594-b4a5-80938d3b960f req-a78da161-d762-41f1-a2c3-dd19acfa09c2 service nova] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Refreshing network info cache for port 49919bfd-f918-4e38-ba86-940a0ce0728a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 651.424650] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e908e65-cc2b-4933-a5cd-d871276b6404 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.434329] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c082dd-dea5-4642-ad49-2f14cd70ae22 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.460422] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 226938f5-f903-4671-b7a3-c6f5a264506e could not be found. [ 651.460658] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 651.460831] env[62109]: INFO nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 651.461078] env[62109]: DEBUG oslo.service.loopingcall [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 651.463558] env[62109]: DEBUG nova.compute.manager [-] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 651.463657] env[62109]: DEBUG nova.network.neutron [-] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 651.491820] env[62109]: DEBUG nova.network.neutron [-] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 651.661919] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384fdec8-98ee-4f5f-b0bf-c1a3532b4129 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.669725] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f69361-29ee-436c-807c-56f2445fff27 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.704021] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc507cb2-7fdd-4b90-a6d1-e880c9dacbcb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.707527] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d84888d-6031-44bc-bbae-f5e15e7cc6c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.720968] env[62109]: DEBUG nova.compute.provider_tree [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 651.884333] env[62109]: DEBUG nova.network.neutron [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.994304] env[62109]: DEBUG nova.network.neutron [-] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.079854] env[62109]: DEBUG nova.network.neutron [req-e49f3904-23e4-4594-b4a5-80938d3b960f req-a78da161-d762-41f1-a2c3-dd19acfa09c2 service nova] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 652.186607] env[62109]: DEBUG nova.network.neutron [req-e49f3904-23e4-4594-b4a5-80938d3b960f req-a78da161-d762-41f1-a2c3-dd19acfa09c2 service nova] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.247638] env[62109]: ERROR nova.scheduler.client.report [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [req-531c58ca-69ee-423e-8188-2e7ecba43554] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-531c58ca-69ee-423e-8188-2e7ecba43554"}]} [ 652.264367] env[62109]: DEBUG nova.scheduler.client.report [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 652.279030] env[62109]: DEBUG nova.scheduler.client.report [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 652.279030] env[62109]: DEBUG nova.compute.provider_tree [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 652.290145] env[62109]: DEBUG nova.scheduler.client.report [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Refreshing aggregate 
associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 652.308870] env[62109]: DEBUG nova.scheduler.client.report [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 652.386564] env[62109]: INFO nova.compute.manager [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] [instance: b60d334c-0834-4267-bb31-1f3c679a2e1d] Took 1.03 seconds to deallocate network for instance. [ 652.496593] env[62109]: INFO nova.compute.manager [-] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Took 1.03 seconds to deallocate network for instance. [ 652.498549] env[62109]: DEBUG nova.compute.claims [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 652.498729] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.691428] env[62109]: DEBUG oslo_concurrency.lockutils [req-e49f3904-23e4-4594-b4a5-80938d3b960f req-a78da161-d762-41f1-a2c3-dd19acfa09c2 service nova] Releasing lock "refresh_cache-226938f5-f903-4671-b7a3-c6f5a264506e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.691721] env[62109]: DEBUG nova.compute.manager [req-e49f3904-23e4-4594-b4a5-80938d3b960f req-a78da161-d762-41f1-a2c3-dd19acfa09c2 service nova] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Received event network-vif-deleted-49919bfd-f918-4e38-ba86-940a0ce0728a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 652.703284] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64daf4e0-3eb7-476b-bc59-f5644eb70a0a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.711589] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003c12b9-6396-41be-9261-c4ff9f52019b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.745250] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82282a8-fb11-4578-9bb6-15667cbc210f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.752856] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c0708b11-ec07-41d6-a379-46c97315e874 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.767621] env[62109]: DEBUG nova.compute.provider_tree [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 653.301295] env[62109]: DEBUG nova.scheduler.client.report [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 57 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 653.301563] env[62109]: DEBUG nova.compute.provider_tree [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 57 to 58 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 653.301742] env[62109]: DEBUG nova.compute.provider_tree [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 653.416875] env[62109]: INFO nova.scheduler.client.report [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Deleted allocations for instance b60d334c-0834-4267-bb31-1f3c679a2e1d [ 653.806826] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.560s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.807388] env[62109]: DEBUG nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 653.810041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.594s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.925469] env[62109]: DEBUG oslo_concurrency.lockutils [None req-759938fb-dd2f-4d28-8846-10b50f7949aa tempest-ListServerFiltersTestJSON-1274476009 tempest-ListServerFiltersTestJSON-1274476009-project-member] Lock "b60d334c-0834-4267-bb31-1f3c679a2e1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.475s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.315302] env[62109]: DEBUG nova.compute.utils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 654.319362] env[62109]: DEBUG nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 654.319531] env[62109]: DEBUG nova.network.neutron [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 654.374923] env[62109]: DEBUG nova.policy [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491fd4e791924dafb155dd356bf20aa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6ee24c114bd495e8f29eeda1f6b8bba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 654.428664] env[62109]: DEBUG nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 654.733371] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1b2a91-8460-4e4b-9390-c1dcee891a39 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.740190] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97dc0d4-c78e-42fd-bdde-2fb6701b974e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.770947] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950cb6ce-57d8-4327-bfb5-e0fe276c82cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.778317] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0caa2b5e-22af-48ea-823d-2cd626094dcd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.792762] env[62109]: DEBUG nova.compute.provider_tree [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.822305] env[62109]: DEBUG nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 654.857174] env[62109]: DEBUG nova.network.neutron [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Successfully created port: 8ef102f4-e9bf-4498-8590-632fb7e51a15 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 654.946562] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.296319] env[62109]: DEBUG nova.scheduler.client.report [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 655.801662] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.991s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.802347] env[62109]: ERROR nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f6bb382f-2bbc-4c88-933d-cdb3f0795d13, please check neutron logs for more information. 
[ 655.802347] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Traceback (most recent call last): [ 655.802347] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 655.802347] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] self.driver.spawn(context, instance, image_meta, [ 655.802347] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 655.802347] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 655.802347] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 655.802347] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] vm_ref = self.build_virtual_machine(instance, [ 655.802347] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 655.802347] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] vif_infos = vmwarevif.get_vif_info(self._session, [ 655.802347] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 655.802695] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] for vif in network_info: [ 655.802695] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 655.802695] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] return self._sync_wrapper(fn, *args, **kwargs) [ 655.802695] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 655.802695] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] self.wait() [ 655.802695] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 655.802695] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] self[:] = self._gt.wait() [ 655.802695] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 655.802695] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] return self._exit_event.wait() [ 655.802695] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 655.802695] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] result = hub.switch() [ 655.802695] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
655.802695] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] return self.greenlet.switch() [ 655.803065] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 655.803065] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] result = function(*args, **kwargs) [ 655.803065] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 655.803065] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] return func(*args, **kwargs) [ 655.803065] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 655.803065] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] raise e [ 655.803065] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 655.803065] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] nwinfo = self.network_api.allocate_for_instance( [ 655.803065] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 655.803065] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] created_port_ids = self._update_ports_for_instance( [ 655.803065] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 655.803065] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] with excutils.save_and_reraise_exception(): [ 655.803065] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 655.803384] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] self.force_reraise() [ 655.803384] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 655.803384] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] raise self.value [ 655.803384] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 655.803384] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] updated_port = self._update_port( [ 655.803384] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 655.803384] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] _ensure_no_port_binding_failure(port) [ 655.803384] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 655.803384] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] raise exception.PortBindingFailed(port_id=port['id']) [ 655.803384] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] nova.exception.PortBindingFailed: Binding failed for port f6bb382f-2bbc-4c88-933d-cdb3f0795d13, please check neutron logs for more information. [ 655.803384] env[62109]: ERROR nova.compute.manager [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] [ 655.803659] env[62109]: DEBUG nova.compute.utils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Binding failed for port f6bb382f-2bbc-4c88-933d-cdb3f0795d13, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 655.804740] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.465s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.806289] env[62109]: INFO nova.compute.claims [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.811012] env[62109]: DEBUG nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Build of instance a05a3519-0395-4e49-b655-a6c6d7bd85a9 was re-scheduled: Binding failed for port f6bb382f-2bbc-4c88-933d-cdb3f0795d13, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 655.812166] env[62109]: DEBUG nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 655.812166] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquiring lock "refresh_cache-a05a3519-0395-4e49-b655-a6c6d7bd85a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.812166] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Acquired lock "refresh_cache-a05a3519-0395-4e49-b655-a6c6d7bd85a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.812166] env[62109]: DEBUG nova.network.neutron [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 655.832470] env[62109]: DEBUG nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 655.867350] env[62109]: DEBUG nova.virt.hardware [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 655.867725] env[62109]: DEBUG nova.virt.hardware [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 655.868051] env[62109]: DEBUG nova.virt.hardware [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.870347] env[62109]: DEBUG nova.virt.hardware [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 655.870521] env[62109]: DEBUG nova.virt.hardware [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.870680] env[62109]: DEBUG nova.virt.hardware [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 655.870935] env[62109]: DEBUG nova.virt.hardware [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 655.871132] env[62109]: DEBUG nova.virt.hardware [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 655.871310] 
env[62109]: DEBUG nova.virt.hardware [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 655.871474] env[62109]: DEBUG nova.virt.hardware [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 655.871644] env[62109]: DEBUG nova.virt.hardware [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 655.872741] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17546aa3-4755-4309-894d-2c30bdc44400 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.882035] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccbea62-be44-4516-b0ba-4060c937c0cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.897309] env[62109]: DEBUG nova.compute.manager [req-e0a8e3f4-d926-4f5c-9120-1d63c4c50f0d req-5cd2d788-0d13-4a29-8e33-c7f28fc860bd service nova] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Received event network-changed-8ef102f4-e9bf-4498-8590-632fb7e51a15 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 655.897444] env[62109]: DEBUG nova.compute.manager [req-e0a8e3f4-d926-4f5c-9120-1d63c4c50f0d req-5cd2d788-0d13-4a29-8e33-c7f28fc860bd service nova] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Refreshing instance network info cache due to event network-changed-8ef102f4-e9bf-4498-8590-632fb7e51a15. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 655.897714] env[62109]: DEBUG oslo_concurrency.lockutils [req-e0a8e3f4-d926-4f5c-9120-1d63c4c50f0d req-5cd2d788-0d13-4a29-8e33-c7f28fc860bd service nova] Acquiring lock "refresh_cache-f5f24014-2196-4c44-b947-a80ac75197de" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.897857] env[62109]: DEBUG oslo_concurrency.lockutils [req-e0a8e3f4-d926-4f5c-9120-1d63c4c50f0d req-5cd2d788-0d13-4a29-8e33-c7f28fc860bd service nova] Acquired lock "refresh_cache-f5f24014-2196-4c44-b947-a80ac75197de" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.898025] env[62109]: DEBUG nova.network.neutron [req-e0a8e3f4-d926-4f5c-9120-1d63c4c50f0d req-5cd2d788-0d13-4a29-8e33-c7f28fc860bd service nova] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Refreshing network info cache for port 8ef102f4-e9bf-4498-8590-632fb7e51a15 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 656.105302] env[62109]: ERROR nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8ef102f4-e9bf-4498-8590-632fb7e51a15, please check neutron logs for more information. [ 656.105302] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 656.105302] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 656.105302] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 656.105302] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 656.105302] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 656.105302] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 656.105302] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 656.105302] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 656.105302] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 656.105302] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 656.105302] env[62109]: ERROR nova.compute.manager raise self.value [ 656.105302] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 656.105302] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 656.105302] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 656.105302] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 656.105714] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 656.105714] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 656.105714] env[62109]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 8ef102f4-e9bf-4498-8590-632fb7e51a15, please check neutron logs for more information. [ 656.105714] env[62109]: ERROR nova.compute.manager [ 656.105714] env[62109]: Traceback (most recent call last): [ 656.105714] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 656.105714] env[62109]: listener.cb(fileno) [ 656.105714] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 656.105714] env[62109]: result = function(*args, **kwargs) [ 656.105714] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 656.105714] env[62109]: return func(*args, **kwargs) [ 656.105714] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 656.105714] env[62109]: raise e [ 656.105714] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 656.105714] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 656.105714] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 656.105714] env[62109]: created_port_ids = self._update_ports_for_instance( [ 656.105714] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 656.105714] env[62109]: with excutils.save_and_reraise_exception(): [ 656.105714] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 656.105714] env[62109]: self.force_reraise() [ 656.105714] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 656.105714] env[62109]: raise self.value [ 656.105714] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 656.105714] env[62109]: updated_port = self._update_port( [ 656.105714] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 656.105714] env[62109]: _ensure_no_port_binding_failure(port) [ 656.105714] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 656.105714] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 656.106434] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 8ef102f4-e9bf-4498-8590-632fb7e51a15, please check neutron logs for more information. [ 656.106434] env[62109]: Removing descriptor: 15 [ 656.106434] env[62109]: ERROR nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8ef102f4-e9bf-4498-8590-632fb7e51a15, please check neutron logs for more information. 
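
[editor's note] The traceback above bottoms out in _ensure_no_port_binding_failure(port) at nova/network/neutron.py:294, which raises PortBindingFailed for port 8ef102f4-e9bf-4498-8590-632fb7e51a15 and aborts the spawn. A minimal illustrative sketch of that failure mode follows; it is not Nova's actual code, and the 'binding:vif_type' == 'binding_failed' test plus the standalone exception class are assumptions made for the example.

# Illustrative sketch only -- not Nova's implementation. It mimics the
# behaviour visible in the traceback: a port whose binding failed makes
# the helper raise PortBindingFailed, which aborts network allocation.

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed (assumed shape)."""

    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    # Assumption: a failed binding is reported by Neutron via the
    # 'binding:vif_type' attribute being set to 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    bad_port = {'id': '8ef102f4-e9bf-4498-8590-632fb7e51a15',
                'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(bad_port)
    except PortBindingFailed as exc:
        print(exc)   # same message that appears in the log records above
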
[ 656.106434] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] Traceback (most recent call last): [ 656.106434] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 656.106434] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] yield resources [ 656.106434] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 656.106434] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] self.driver.spawn(context, instance, image_meta, [ 656.106434] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 656.106434] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 656.106434] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 656.106434] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] vm_ref = self.build_virtual_machine(instance, [ 656.106993] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 656.106993] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] vif_infos = vmwarevif.get_vif_info(self._session, [ 656.106993] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 656.106993] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] for vif in network_info: [ 656.106993] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 656.106993] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] return self._sync_wrapper(fn, *args, **kwargs) [ 656.106993] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 656.106993] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] self.wait() [ 656.106993] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 656.106993] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] self[:] = self._gt.wait() [ 656.106993] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 656.106993] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] return self._exit_event.wait() [ 656.106993] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 656.108062] env[62109]: ERROR 
nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] result = hub.switch() [ 656.108062] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 656.108062] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] return self.greenlet.switch() [ 656.108062] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 656.108062] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] result = function(*args, **kwargs) [ 656.108062] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 656.108062] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] return func(*args, **kwargs) [ 656.108062] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 656.108062] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] raise e [ 656.108062] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 656.108062] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] nwinfo = self.network_api.allocate_for_instance( [ 656.108062] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 656.108062] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] created_port_ids = self._update_ports_for_instance( [ 656.108555] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 656.108555] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] with excutils.save_and_reraise_exception(): [ 656.108555] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 656.108555] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] self.force_reraise() [ 656.108555] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 656.108555] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] raise self.value [ 656.108555] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 656.108555] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] updated_port = self._update_port( [ 656.108555] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 656.108555] 
env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] _ensure_no_port_binding_failure(port) [ 656.108555] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 656.108555] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] raise exception.PortBindingFailed(port_id=port['id']) [ 656.108875] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] nova.exception.PortBindingFailed: Binding failed for port 8ef102f4-e9bf-4498-8590-632fb7e51a15, please check neutron logs for more information. [ 656.108875] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] [ 656.108875] env[62109]: INFO nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Terminating instance [ 656.108875] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "refresh_cache-f5f24014-2196-4c44-b947-a80ac75197de" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.341087] env[62109]: DEBUG nova.network.neutron [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 656.420016] env[62109]: DEBUG nova.network.neutron [req-e0a8e3f4-d926-4f5c-9120-1d63c4c50f0d req-5cd2d788-0d13-4a29-8e33-c7f28fc860bd service nova] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 656.452361] env[62109]: DEBUG nova.network.neutron [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.493150] env[62109]: DEBUG nova.network.neutron [req-e0a8e3f4-d926-4f5c-9120-1d63c4c50f0d req-5cd2d788-0d13-4a29-8e33-c7f28fc860bd service nova] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.956320] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Releasing lock "refresh_cache-a05a3519-0395-4e49-b655-a6c6d7bd85a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.956602] env[62109]: DEBUG nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 656.956732] env[62109]: DEBUG nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 656.956913] env[62109]: DEBUG nova.network.neutron [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 656.974706] env[62109]: DEBUG nova.network.neutron [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 656.995392] env[62109]: DEBUG oslo_concurrency.lockutils [req-e0a8e3f4-d926-4f5c-9120-1d63c4c50f0d req-5cd2d788-0d13-4a29-8e33-c7f28fc860bd service nova] Releasing lock "refresh_cache-f5f24014-2196-4c44-b947-a80ac75197de" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.998316] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "refresh_cache-f5f24014-2196-4c44-b947-a80ac75197de" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.998506] env[62109]: DEBUG nova.network.neutron [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 657.208928] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd5fcaf-eebb-4ee5-a80e-533f630e48cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.216030] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8e20e1-00f3-45fe-8771-454d09bdf048 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.246425] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d984d4-acf0-4aea-bfb8-3500e5a39767 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.253712] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c0ee2c-6ddf-4349-86e1-763df87e25da {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.268464] env[62109]: DEBUG nova.compute.provider_tree [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.477485] env[62109]: DEBUG nova.network.neutron [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.516697] env[62109]: DEBUG nova.network.neutron [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 657.592033] env[62109]: DEBUG nova.network.neutron [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.771326] env[62109]: DEBUG nova.scheduler.client.report [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 657.938936] env[62109]: DEBUG nova.compute.manager [req-5d581289-3be7-49ca-94ce-75613e20668e req-d008921b-ce19-4fc4-8cd6-fc283426cea2 service nova] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Received event network-vif-deleted-8ef102f4-e9bf-4498-8590-632fb7e51a15 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 657.980676] env[62109]: INFO nova.compute.manager [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] [instance: a05a3519-0395-4e49-b655-a6c6d7bd85a9] Took 1.02 seconds to deallocate network for instance. [ 658.096094] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "refresh_cache-f5f24014-2196-4c44-b947-a80ac75197de" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.096539] env[62109]: DEBUG nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 658.096774] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 658.097036] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2ceab1a2-97c2-4fff-9188-6cfe558ded45 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.105711] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883eaeba-c387-47cb-9c2a-4db8ac8c84dd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.127423] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f5f24014-2196-4c44-b947-a80ac75197de could not be found. [ 658.127657] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 658.127863] env[62109]: INFO nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Took 0.03 seconds to destroy the instance on the hypervisor. [ 658.128126] env[62109]: DEBUG oslo.service.loopingcall [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 658.128357] env[62109]: DEBUG nova.compute.manager [-] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 658.128454] env[62109]: DEBUG nova.network.neutron [-] [instance: f5f24014-2196-4c44-b947-a80ac75197de] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 658.144967] env[62109]: DEBUG nova.network.neutron [-] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 658.277771] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 658.277771] env[62109]: DEBUG nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 658.282719] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.834s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.284129] env[62109]: INFO nova.compute.claims [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 658.647393] env[62109]: DEBUG nova.network.neutron [-] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.790773] env[62109]: DEBUG nova.compute.utils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 658.792568] env[62109]: DEBUG nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 658.792799] env[62109]: DEBUG nova.network.neutron [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 658.831897] env[62109]: DEBUG nova.policy [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '819593f9e34f4925817f7afbdc7bfa92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a34411728d264f3fb50e9108b44eb70e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 659.013202] env[62109]: INFO nova.scheduler.client.report [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Deleted allocations for instance a05a3519-0395-4e49-b655-a6c6d7bd85a9 [ 659.141249] env[62109]: DEBUG nova.network.neutron [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Successfully created port: f663db46-f083-4755-9c3f-34e9e4a7b8b3 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.150885] env[62109]: INFO nova.compute.manager [-] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Took 1.02 seconds to deallocate network for instance. [ 659.155122] env[62109]: DEBUG nova.compute.claims [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 659.155370] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.297416] env[62109]: DEBUG nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 659.521689] env[62109]: DEBUG oslo_concurrency.lockutils [None req-14bd4f82-9e3a-413d-a1dd-5d6c5f510e1a tempest-ServersAdminTestJSON-285907221 tempest-ServersAdminTestJSON-285907221-project-member] Lock "a05a3519-0395-4e49-b655-a6c6d7bd85a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.722s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.778954] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fb37e6-7fda-4307-b048-5b97e17a1e64 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.788539] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21b787c-7601-4fdc-ba82-6a19f7bf2120 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.821850] env[62109]: INFO nova.virt.block_device [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Booting with volume 59f880dd-ea4d-45e6-9eec-ecf8ff559d57 at /dev/sda [ 659.826375] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed36a26-5d0f-4438-8384-b8e7d1e35fa7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.842471] env[62109]: DEBUG nova.compute.manager [req-f3d83a35-8a33-4742-ac91-216d5809376a req-fb019dea-b4ce-46bb-bc5c-4716af384d15 service nova] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Received event network-changed-f663db46-f083-4755-9c3f-34e9e4a7b8b3 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 659.842668] env[62109]: DEBUG nova.compute.manager [req-f3d83a35-8a33-4742-ac91-216d5809376a req-fb019dea-b4ce-46bb-bc5c-4716af384d15 service nova] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Refreshing instance network info cache due to event network-changed-f663db46-f083-4755-9c3f-34e9e4a7b8b3. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 659.842871] env[62109]: DEBUG oslo_concurrency.lockutils [req-f3d83a35-8a33-4742-ac91-216d5809376a req-fb019dea-b4ce-46bb-bc5c-4716af384d15 service nova] Acquiring lock "refresh_cache-4c02989b-4638-41b8-bccb-f2605c883346" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.843016] env[62109]: DEBUG oslo_concurrency.lockutils [req-f3d83a35-8a33-4742-ac91-216d5809376a req-fb019dea-b4ce-46bb-bc5c-4716af384d15 service nova] Acquired lock "refresh_cache-4c02989b-4638-41b8-bccb-f2605c883346" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.843178] env[62109]: DEBUG nova.network.neutron [req-f3d83a35-8a33-4742-ac91-216d5809376a req-fb019dea-b4ce-46bb-bc5c-4716af384d15 service nova] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Refreshing network info cache for port f663db46-f083-4755-9c3f-34e9e4a7b8b3 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 659.845659] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f25aaaf-d58f-441b-b97e-baa818442140 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.860013] env[62109]: DEBUG nova.compute.provider_tree [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.873605] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e08a50f5-2501-43dd-82a0-c3d0974f2e20 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.882766] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae02bc23-5423-4920-b090-6856056ff635 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.906699] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d3e08822-dee3-41bb-9b25-c746f02f6356 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.914387] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d72e19-dd77-45af-ad0e-afa85d58b51e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.943106] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee2becf-4c1f-4922-97b6-8d1f813dfc2c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.950111] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c971513a-601e-4306-b7eb-7a71713c1893 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.966886] env[62109]: DEBUG nova.virt.block_device [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 
tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Updating existing volume attachment record: d5e91c1d-1407-489b-a1ca-2518e25911d6 {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 660.019534] env[62109]: ERROR nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f663db46-f083-4755-9c3f-34e9e4a7b8b3, please check neutron logs for more information. [ 660.019534] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 660.019534] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 660.019534] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 660.019534] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 660.019534] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 660.019534] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 660.019534] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 660.019534] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.019534] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 660.019534] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.019534] env[62109]: ERROR nova.compute.manager raise self.value [ 660.019534] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 660.019534] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 660.019534] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.019534] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 660.020220] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 660.020220] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 660.020220] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f663db46-f083-4755-9c3f-34e9e4a7b8b3, please check neutron logs for more information. 
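
[editor's note] Both port failures in this section (8ef102f4-... and f663db46-...) propagate through excutils.save_and_reraise_exception(), visible at oslo_utils/excutils.py lines 227 and 200 in the tracebacks. The short example below shows that pattern in isolation, assuming the oslo.utils library is installed; the update_port/update_ports_for_instance functions here are hypothetical stand-ins, not Nova code.

# Minimal example of the oslo.utils pattern seen in the tracebacks:
# save_and_reraise_exception() lets cleanup run inside the `with` block
# and then re-raises the original exception when the block exits.
from oslo_utils import excutils


def update_port(port_id):
    # Hypothetical stand-in for the Neutron call that fails in the log.
    raise RuntimeError(f"binding failed for {port_id}")


def update_ports_for_instance(port_id):
    try:
        update_port(port_id)
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup (e.g. rolling back partially created ports) would
            # go here; the RuntimeError is re-raised afterwards.
            print("cleaning up after failed port update")


if __name__ == '__main__':
    try:
        update_ports_for_instance('f663db46-f083-4755-9c3f-34e9e4a7b8b3')
    except RuntimeError as exc:
        print("re-raised:", exc)
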
[ 660.020220] env[62109]: ERROR nova.compute.manager [ 660.020220] env[62109]: Traceback (most recent call last): [ 660.020220] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 660.020220] env[62109]: listener.cb(fileno) [ 660.020220] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 660.020220] env[62109]: result = function(*args, **kwargs) [ 660.020220] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 660.020220] env[62109]: return func(*args, **kwargs) [ 660.020220] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 660.020220] env[62109]: raise e [ 660.020220] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 660.020220] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 660.020220] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 660.020220] env[62109]: created_port_ids = self._update_ports_for_instance( [ 660.020220] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 660.020220] env[62109]: with excutils.save_and_reraise_exception(): [ 660.020220] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.020220] env[62109]: self.force_reraise() [ 660.020220] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.020220] env[62109]: raise self.value [ 660.020220] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 660.020220] env[62109]: updated_port = self._update_port( [ 660.020220] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.020220] env[62109]: _ensure_no_port_binding_failure(port) [ 660.020220] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 660.020220] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 660.021030] env[62109]: nova.exception.PortBindingFailed: Binding failed for port f663db46-f083-4755-9c3f-34e9e4a7b8b3, please check neutron logs for more information. [ 660.021030] env[62109]: Removing descriptor: 18 [ 660.024340] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 660.361892] env[62109]: DEBUG nova.network.neutron [req-f3d83a35-8a33-4742-ac91-216d5809376a req-fb019dea-b4ce-46bb-bc5c-4716af384d15 service nova] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 660.364352] env[62109]: DEBUG nova.scheduler.client.report [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 660.442197] env[62109]: DEBUG nova.network.neutron [req-f3d83a35-8a33-4742-ac91-216d5809376a req-fb019dea-b4ce-46bb-bc5c-4716af384d15 service nova] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.546237] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.868759] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.869275] env[62109]: DEBUG nova.compute.manager [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 660.872778] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.340s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.874052] env[62109]: INFO nova.compute.claims [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.944773] env[62109]: DEBUG oslo_concurrency.lockutils [req-f3d83a35-8a33-4742-ac91-216d5809376a req-fb019dea-b4ce-46bb-bc5c-4716af384d15 service nova] Releasing lock "refresh_cache-4c02989b-4638-41b8-bccb-f2605c883346" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.378704] env[62109]: DEBUG nova.compute.utils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 661.384035] env[62109]: DEBUG nova.compute.manager [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Not allocating networking since 'none' was specified. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 661.883165] env[62109]: DEBUG nova.compute.manager [req-054c7c6c-e2e8-412f-b2d8-da850e3de9e2 req-020628d9-96bf-4e5a-b389-a22607974565 service nova] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Received event network-vif-deleted-f663db46-f083-4755-9c3f-34e9e4a7b8b3 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 661.883737] env[62109]: DEBUG nova.compute.manager [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 662.066381] env[62109]: DEBUG nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 662.066909] env[62109]: DEBUG nova.virt.hardware [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 662.067131] env[62109]: DEBUG nova.virt.hardware [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 662.067540] env[62109]: DEBUG nova.virt.hardware [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.067540] env[62109]: DEBUG nova.virt.hardware [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 662.067639] env[62109]: DEBUG nova.virt.hardware [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.067730] env[62109]: DEBUG nova.virt.hardware [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 662.067924] env[62109]: DEBUG nova.virt.hardware [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 662.068088] env[62109]: DEBUG nova.virt.hardware [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 662.068247] env[62109]: DEBUG nova.virt.hardware [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Got 1 possible 
topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 662.068402] env[62109]: DEBUG nova.virt.hardware [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 662.068571] env[62109]: DEBUG nova.virt.hardware [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 662.069676] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714e6090-d5d3-43eb-a7b0-dafa8884cb9a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.081501] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d966df2-ed5b-46ba-8e1e-c8451dbdea0d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.100619] env[62109]: ERROR nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f663db46-f083-4755-9c3f-34e9e4a7b8b3, please check neutron logs for more information. 
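
[editor's note] The nova.virt.hardware DEBUG lines just above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") amount to enumerating socket/core/thread combinations whose product equals the flavor's vCPU count, capped by the 65536:65536:65536 limits. A simplified sketch of that enumeration follows; it is not Nova's _get_possible_cpu_topologies, and it assumes only exact products are accepted.

# Simplified sketch of the CPU-topology enumeration reflected in the
# nova.virt.hardware DEBUG lines above; not the real Nova implementation.
from collections import namedtuple
from itertools import product

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Yield topologies whose sockets * cores * threads == vcpus."""
    for sockets, cores, threads in product(range(1, max_sockets + 1),
                                           range(1, max_cores + 1),
                                           range(1, max_threads + 1)):
        if sockets * cores * threads == vcpus:
            yield VirtCPUTopology(sockets, cores, threads)


if __name__ == '__main__':
    # With the m1.nano flavor's single vCPU only 1x1x1 fits, matching
    # "Got 1 possible topologies" in the log. Small caps keep the demo fast
    # (the log's 65536 limits would make brute force impractical).
    topologies = list(possible_cpu_topologies(1, 8, 8, 2))
    print(len(topologies), topologies)
    # -> 1 [VirtCPUTopology(sockets=1, cores=1, threads=1)]
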
[ 662.100619] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Traceback (most recent call last): [ 662.100619] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 662.100619] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] yield resources [ 662.100619] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 662.100619] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] self.driver.spawn(context, instance, image_meta, [ 662.100619] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 662.100619] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] self._vmops.spawn(context, instance, image_meta, injected_files, [ 662.100619] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 662.100619] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] vm_ref = self.build_virtual_machine(instance, [ 662.100619] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 662.101378] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] vif_infos = vmwarevif.get_vif_info(self._session, [ 662.101378] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 662.101378] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] for vif in network_info: [ 662.101378] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 662.101378] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] return self._sync_wrapper(fn, *args, **kwargs) [ 662.101378] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 662.101378] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] self.wait() [ 662.101378] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 662.101378] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] self[:] = self._gt.wait() [ 662.101378] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 662.101378] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] return self._exit_event.wait() [ 662.101378] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 662.101378] env[62109]: ERROR 
nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] current.throw(*self._exc) [ 662.101743] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 662.101743] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] result = function(*args, **kwargs) [ 662.101743] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 662.101743] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] return func(*args, **kwargs) [ 662.101743] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 662.101743] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] raise e [ 662.101743] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 662.101743] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] nwinfo = self.network_api.allocate_for_instance( [ 662.101743] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 662.101743] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] created_port_ids = self._update_ports_for_instance( [ 662.101743] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 662.101743] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] with excutils.save_and_reraise_exception(): [ 662.101743] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.102056] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] self.force_reraise() [ 662.102056] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.102056] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] raise self.value [ 662.102056] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 662.102056] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] updated_port = self._update_port( [ 662.102056] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.102056] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] _ensure_no_port_binding_failure(port) [ 662.102056] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
662.102056] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] raise exception.PortBindingFailed(port_id=port['id']) [ 662.102056] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] nova.exception.PortBindingFailed: Binding failed for port f663db46-f083-4755-9c3f-34e9e4a7b8b3, please check neutron logs for more information. [ 662.102056] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] [ 662.102056] env[62109]: INFO nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Terminating instance [ 662.105527] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Acquiring lock "refresh_cache-4c02989b-4638-41b8-bccb-f2605c883346" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.107064] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Acquired lock "refresh_cache-4c02989b-4638-41b8-bccb-f2605c883346" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.107064] env[62109]: DEBUG nova.network.neutron [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 662.352271] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41536461-8fe3-45d8-9474-209ba0817c80 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.360374] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a99e68-85dd-45ad-86c0-086ebf322cf2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.394717] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a8daef-a5eb-4e09-a1c3-176695b05756 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.403139] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f2d0ad-d302-4218-aa45-73bdfccadfe6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.417403] env[62109]: DEBUG nova.compute.provider_tree [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 662.630463] env[62109]: DEBUG nova.network.neutron [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 662.774604] env[62109]: DEBUG nova.network.neutron [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.898671] env[62109]: DEBUG nova.compute.manager [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 662.939208] env[62109]: DEBUG nova.virt.hardware [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 662.939208] env[62109]: DEBUG nova.virt.hardware [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 662.939208] env[62109]: DEBUG nova.virt.hardware [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.939208] env[62109]: DEBUG nova.virt.hardware [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 662.939422] env[62109]: DEBUG nova.virt.hardware [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 662.939422] env[62109]: DEBUG nova.virt.hardware [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 662.939422] env[62109]: DEBUG nova.virt.hardware [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 662.939422] env[62109]: DEBUG nova.virt.hardware [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 662.939422] env[62109]: DEBUG nova.virt.hardware [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 662.939554] env[62109]: DEBUG nova.virt.hardware [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 662.939554] env[62109]: DEBUG nova.virt.hardware [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 662.940194] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e527fa30-bce9-4914-ab65-752650cde921 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.949124] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1bafb0f-7940-4104-9849-8208f59d36a6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.958730] env[62109]: DEBUG nova.scheduler.client.report [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 58 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 662.959243] env[62109]: DEBUG nova.compute.provider_tree [None 
req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 58 to 59 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 662.959530] env[62109]: DEBUG nova.compute.provider_tree [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 662.971555] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 662.977750] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Creating folder: Project (a78b39356bc84b638109dea5eaaae5a9). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 662.978725] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.106s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.979186] env[62109]: DEBUG nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 662.981499] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7837ada6-17d6-4115-8571-3e7154ffe7b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.983469] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.849s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.996250] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Created folder: Project (a78b39356bc84b638109dea5eaaae5a9) in parent group-v244329. [ 662.996438] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Creating folder: Instances. Parent ref: group-v244341. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 662.996675] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67ae8661-2244-4a3c-a228-13b77c4a022d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.006615] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Created folder: Instances in parent group-v244341. [ 663.006852] env[62109]: DEBUG oslo.service.loopingcall [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 663.007053] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 663.007262] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27e404e5-7296-463f-a0d2-4f054f8405fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.025051] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 663.025051] env[62109]: value = "task-1116123" [ 663.025051] env[62109]: _type = "Task" [ 663.025051] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.033231] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116123, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.276453] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Releasing lock "refresh_cache-4c02989b-4638-41b8-bccb-f2605c883346" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.277077] env[62109]: DEBUG nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 663.277428] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-628826d1-ca84-40f7-8984-1bac91ba15c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.286607] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b04012-8123-44b7-a43f-92bb3082542b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.310613] env[62109]: WARNING nova.virt.vmwareapi.driver [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 4c02989b-4638-41b8-bccb-f2605c883346 could not be found. [ 663.310864] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 663.311179] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a597fe53-70c1-46c3-b0c7-0d1cf2f7c5e7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.319657] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a694375-9d8f-4ebc-bb99-494f1acf7501 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.343278] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4c02989b-4638-41b8-bccb-f2605c883346 could not be found. 
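
The CreateVM_Task records above (task-1116123, "Waiting for the task ... to complete", "_poll_task ... progress is 0%") are oslo.vmware's standard task-polling pattern: the SOAP call returns a Task managed object and the session blocks in wait_for_task until vCenter reports success or error. A minimal sketch of that pattern follows; the vCenter address and credentials are placeholders, and the constructor argument order is taken from oslo.vmware's documented example, so treat it as an assumption rather than a drop-in for the Nova driver.

    # Hedged sketch of the oslo.vmware wait_for_task pattern seen in the log.
    # 'vc.example.test' and the credentials are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test', 'user@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Read-only example call so the sketch does something observable:
    # list up to 100 Datastore managed objects.
    datastores = session.invoke_api(vim_util, 'get_objects',
                                    session.vim, 'Datastore', 100)

    # Methods that return a Task (CreateVM_Task, SearchDatastore_Task, ...)
    # are then polled just like the "_poll_task ... progress is 0%" records
    # above, e.g.:
    #   task_ref = session.invoke_api(session.vim, 'CreateVM_Task',
    #                                 folder_ref, config=config_spec,
    #                                 pool=respool_ref)
    #   task_info = session.wait_for_task(task_ref)
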
[ 663.343531] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 663.343712] env[62109]: INFO nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Took 0.07 seconds to destroy the instance on the hypervisor. [ 663.343958] env[62109]: DEBUG oslo.service.loopingcall [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 663.344269] env[62109]: DEBUG nova.compute.manager [-] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 663.344345] env[62109]: DEBUG nova.network.neutron [-] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 663.362510] env[62109]: DEBUG nova.network.neutron [-] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 663.488292] env[62109]: DEBUG nova.compute.utils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 663.493807] env[62109]: DEBUG nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 663.494072] env[62109]: DEBUG nova.network.neutron [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 663.538644] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116123, 'name': CreateVM_Task, 'duration_secs': 0.26283} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.538811] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 663.544109] env[62109]: DEBUG oslo_vmware.service [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df308f68-8bba-4502-a2a8-7373377e8938 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.552559] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.552731] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.553213] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 663.553538] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-025fdef2-4be2-4594-b871-3a4a2f59974e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.563209] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 663.563209] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522e3836-975c-beee-7114-8d53abc31c4e" [ 663.563209] env[62109]: _type = "Task" [ 663.563209] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.574786] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522e3836-975c-beee-7114-8d53abc31c4e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.590815] env[62109]: DEBUG nova.policy [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f24af6929eb4a4897d33b0847b5bb41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92265b8bc7fd4e9485df360c77e75b21', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 663.864882] env[62109]: DEBUG nova.network.neutron [-] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.963882] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb012f43-b3f0-4999-b76e-c45cb621ed9b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.972511] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0032415-40be-4313-b072-c5d7606bc8ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.007712] env[62109]: DEBUG nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 664.012102] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea6b1f4-8c64-4219-a011-1aa290f9716a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.019474] env[62109]: DEBUG nova.network.neutron [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Successfully created port: 181c75f9-e3b3-4820-9c7c-f3363ff5d514 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 664.022441] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4854b0d-f75e-4cd0-b9d0-7b6b5d7f6d5e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.036563] env[62109]: DEBUG nova.compute.provider_tree [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.073405] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.073669] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 664.073899] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.074055] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.074272] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 664.074517] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9162aeae-4c24-4f29-bb48-78f7bb979076 
{{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.082937] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 664.083399] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 664.084213] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c80f461-0d47-4bec-9722-0a39e8faae87 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.092409] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3939cda3-9c2b-4749-82f1-fb36d4a211ab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.098372] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 664.098372] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522b1f7b-b8ef-c5d7-24c2-35922ed2e74b" [ 664.098372] env[62109]: _type = "Task" [ 664.098372] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.110679] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522b1f7b-b8ef-c5d7-24c2-35922ed2e74b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.371719] env[62109]: INFO nova.compute.manager [-] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Took 1.03 seconds to deallocate network for instance. 
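
The "Updating inventory in ProviderTree" and "Inventory has not changed ... based on inventory data" records around this point show the exact payload Nova's report client keeps in sync with Placement for provider 574e9717-c25e-453d-8028-45d9e2f95398. As a rough illustration only (the Placement URL, token handling and microversion below are assumptions, and Nova itself goes through keystoneauth rather than raw requests), the same update expressed as a direct PUT against the Placement API looks like this:

    # Hedged sketch: the inventory dict from the log, sent as a Placement
    # "PUT /resource_providers/{uuid}/inventories" request.
    import requests

    PLACEMENT = "http://placement.example/placement"    # placeholder endpoint
    TOKEN = "gAAAA..."                                   # placeholder keystone token
    PROVIDER = "574e9717-c25e-453d-8028-45d9e2f95398"    # provider uuid from the log

    payload = {
        # Must match the generation Placement last returned (58 -> 59 in the
        # log), otherwise the update is rejected as a conflict.
        "resource_provider_generation": 58,
        "inventories": {
            "VCPU":      {"total": 48,     "reserved": 0,   "min_unit": 1,
                          "max_unit": 16,    "step_size": 1, "allocation_ratio": 4.0},
            "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                          "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
            "DISK_GB":   {"total": 400,    "reserved": 0,   "min_unit": 1,
                          "max_unit": 171,   "step_size": 1, "allocation_ratio": 1.0},
        },
    }

    resp = requests.put(
        f"{PLACEMENT}/resource_providers/{PROVIDER}/inventories",
        json=payload,
        headers={"X-Auth-Token": TOKEN,
                 "OpenStack-API-Version": "placement 1.39"},  # microversion is an assumption
        timeout=30)
    resp.raise_for_status()
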
[ 664.539904] env[62109]: DEBUG nova.scheduler.client.report [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 664.610111] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Preparing fetch location {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 664.610397] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Creating directory with path [datastore1] vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 664.610644] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75eca840-9224-4268-8f89-e232b5d4aaad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.636994] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Created directory with path [datastore1] vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 664.637242] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Fetch image to [datastore1] vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 664.637491] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Downloading image file data 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 to [datastore1] vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk on the data store datastore1 {{(pid=62109) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 664.638228] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59cd3ca-b1aa-4127-b1d9-3c6efe8d9d65 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.645922] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b2ea14-1ff7-4b61-afa6-d1c2d4ab4852 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.656766] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf9d0e0-d5d2-4a92-b73a-c1019764b5e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.689627] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9975b5fa-7f72-4379-884f-6a57adcb5cc5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.696596] env[62109]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-beac7932-cea2-44d7-ad9c-3e6165efa73d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.721819] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Downloading image file data 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 to the data store datastore1 {{(pid=62109) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 664.801640] env[62109]: DEBUG oslo_vmware.rw_handles [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62109) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 664.869528] env[62109]: DEBUG nova.compute.manager [req-7a5ef03f-801d-4038-875b-f5848df2a1c7 req-efca65a7-dd5f-4ca8-82e2-8e6e1c63d0f8 service nova] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Received event network-changed-181c75f9-e3b3-4820-9c7c-f3363ff5d514 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 664.871298] env[62109]: DEBUG nova.compute.manager [req-7a5ef03f-801d-4038-875b-f5848df2a1c7 req-efca65a7-dd5f-4ca8-82e2-8e6e1c63d0f8 service nova] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Refreshing instance network info cache due to event network-changed-181c75f9-e3b3-4820-9c7c-f3363ff5d514. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 664.871298] env[62109]: DEBUG oslo_concurrency.lockutils [req-7a5ef03f-801d-4038-875b-f5848df2a1c7 req-efca65a7-dd5f-4ca8-82e2-8e6e1c63d0f8 service nova] Acquiring lock "refresh_cache-7ef14775-9be3-4275-b5ca-dad43aa61dd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.871298] env[62109]: DEBUG oslo_concurrency.lockutils [req-7a5ef03f-801d-4038-875b-f5848df2a1c7 req-efca65a7-dd5f-4ca8-82e2-8e6e1c63d0f8 service nova] Acquired lock "refresh_cache-7ef14775-9be3-4275-b5ca-dad43aa61dd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.871298] env[62109]: DEBUG nova.network.neutron [req-7a5ef03f-801d-4038-875b-f5848df2a1c7 req-efca65a7-dd5f-4ca8-82e2-8e6e1c63d0f8 service nova] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Refreshing network info cache for port 181c75f9-e3b3-4820-9c7c-f3363ff5d514 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 664.935546] env[62109]: INFO nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Took 0.56 seconds to detach 1 volumes for instance. [ 664.937814] env[62109]: DEBUG nova.compute.claims [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 664.937983] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.022967] env[62109]: DEBUG nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 665.045775] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.062s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.046445] env[62109]: ERROR nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f196c7a4-5f7e-40b5-a3d7-3eda1245c24e, please check neutron logs for more information. 
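
The lock bookkeeping in the records just above ('Lock "compute_resources" acquired by "...ResourceTracker.abort_instance_claim" :: waited 17.849s', '... "released" ... held 2.062s') is emitted by oslo.concurrency's lockutils wrapper: claims and claim aborts are serialized on one named semaphore per compute worker, so under a burst of builds the waited times grow while the held times stay small. A toy, self-contained sketch of that pattern (the function body is illustrative, not Nova's resource tracker); the traceback for the build failure reported above continues in the records that follow.

    # Hedged sketch of the lockutils.synchronized pattern behind the
    # 'Lock "compute_resources" acquired/released ... waited/held' records.
    import time
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Everything here runs under the named semaphore, one caller at a
        # time within this process; contending callers accumulate 'waited'
        # time while the work itself accounts for the 'held' time.
        time.sleep(0.05)   # stand-in for inventory/claim bookkeeping
        return instance_uuid

    instance_claim('7ef14775-9be3-4275-b5ca-dad43aa61dd3')
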
[ 665.046445] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Traceback (most recent call last): [ 665.046445] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 665.046445] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] self.driver.spawn(context, instance, image_meta, [ 665.046445] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 665.046445] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] self._vmops.spawn(context, instance, image_meta, injected_files, [ 665.046445] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 665.046445] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] vm_ref = self.build_virtual_machine(instance, [ 665.046445] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 665.046445] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] vif_infos = vmwarevif.get_vif_info(self._session, [ 665.046445] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 665.046752] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] for vif in network_info: [ 665.046752] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 665.046752] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] return self._sync_wrapper(fn, *args, **kwargs) [ 665.046752] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 665.046752] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] self.wait() [ 665.046752] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 665.046752] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] self[:] = self._gt.wait() [ 665.046752] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 665.046752] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] return self._exit_event.wait() [ 665.046752] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 665.046752] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] result = hub.switch() [ 665.046752] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
665.046752] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] return self.greenlet.switch() [ 665.047140] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 665.047140] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] result = function(*args, **kwargs) [ 665.047140] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 665.047140] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] return func(*args, **kwargs) [ 665.047140] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 665.047140] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] raise e [ 665.047140] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 665.047140] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] nwinfo = self.network_api.allocate_for_instance( [ 665.047140] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 665.047140] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] created_port_ids = self._update_ports_for_instance( [ 665.047140] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 665.047140] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] with excutils.save_and_reraise_exception(): [ 665.047140] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.047440] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] self.force_reraise() [ 665.047440] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.047440] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] raise self.value [ 665.047440] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 665.047440] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] updated_port = self._update_port( [ 665.047440] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 665.047440] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] _ensure_no_port_binding_failure(port) [ 665.047440] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 665.047440] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] raise exception.PortBindingFailed(port_id=port['id']) [ 665.047440] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] nova.exception.PortBindingFailed: Binding failed for port f196c7a4-5f7e-40b5-a3d7-3eda1245c24e, please check neutron logs for more information. [ 665.047440] env[62109]: ERROR nova.compute.manager [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] [ 665.047702] env[62109]: DEBUG nova.compute.utils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Binding failed for port f196c7a4-5f7e-40b5-a3d7-3eda1245c24e, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 665.050359] env[62109]: DEBUG nova.virt.hardware [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 665.050605] env[62109]: DEBUG nova.virt.hardware [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 665.050730] env[62109]: DEBUG nova.virt.hardware [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 665.050909] env[62109]: DEBUG nova.virt.hardware [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 665.051069] env[62109]: DEBUG nova.virt.hardware [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 665.051451] env[62109]: DEBUG nova.virt.hardware [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 665.051451] env[62109]: DEBUG nova.virt.hardware [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 665.051740] env[62109]: DEBUG nova.virt.hardware [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 665.051930] env[62109]: DEBUG nova.virt.hardware [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 665.052130] env[62109]: DEBUG nova.virt.hardware [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 665.052375] env[62109]: DEBUG nova.virt.hardware [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 665.052650] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.868s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.054350] env[62109]: INFO nova.compute.claims [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 665.057781] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcead4ed-8cff-4b85-bc95-d4515332aec5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.062749] env[62109]: DEBUG nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Build of instance e50019d2-d9a1-4077-ba1a-7f7bde266058 was re-scheduled: Binding failed for port f196c7a4-5f7e-40b5-a3d7-3eda1245c24e, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 665.063226] env[62109]: DEBUG nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 665.063454] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquiring lock "refresh_cache-e50019d2-d9a1-4077-ba1a-7f7bde266058" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.063596] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Acquired lock "refresh_cache-e50019d2-d9a1-4077-ba1a-7f7bde266058" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.063915] env[62109]: DEBUG nova.network.neutron [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 665.077675] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de913b9-97b3-4bd3-8cfe-132a64292c70 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.104890] env[62109]: ERROR nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 181c75f9-e3b3-4820-9c7c-f3363ff5d514, please check neutron logs for more information. 
[ 665.104890] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 665.104890] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 665.104890] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 665.104890] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 665.104890] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 665.104890] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 665.104890] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 665.104890] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.104890] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 665.104890] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.104890] env[62109]: ERROR nova.compute.manager raise self.value [ 665.104890] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 665.104890] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 665.104890] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 665.104890] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 665.105396] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 665.105396] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 665.105396] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 181c75f9-e3b3-4820-9c7c-f3363ff5d514, please check neutron logs for more information. 
[ 665.105396] env[62109]: ERROR nova.compute.manager [ 665.105396] env[62109]: Traceback (most recent call last): [ 665.105396] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 665.105396] env[62109]: listener.cb(fileno) [ 665.105396] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 665.105396] env[62109]: result = function(*args, **kwargs) [ 665.105396] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 665.105396] env[62109]: return func(*args, **kwargs) [ 665.105396] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 665.105396] env[62109]: raise e [ 665.105396] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 665.105396] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 665.105396] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 665.105396] env[62109]: created_port_ids = self._update_ports_for_instance( [ 665.105396] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 665.105396] env[62109]: with excutils.save_and_reraise_exception(): [ 665.105396] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.105396] env[62109]: self.force_reraise() [ 665.105396] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.105396] env[62109]: raise self.value [ 665.105396] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 665.105396] env[62109]: updated_port = self._update_port( [ 665.105396] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 665.105396] env[62109]: _ensure_no_port_binding_failure(port) [ 665.105396] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 665.105396] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 665.106205] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 181c75f9-e3b3-4820-9c7c-f3363ff5d514, please check neutron logs for more information. [ 665.106205] env[62109]: Removing descriptor: 18 [ 665.106205] env[62109]: ERROR nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 181c75f9-e3b3-4820-9c7c-f3363ff5d514, please check neutron logs for more information. 
[ 665.106205] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Traceback (most recent call last): [ 665.106205] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 665.106205] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] yield resources [ 665.106205] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 665.106205] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] self.driver.spawn(context, instance, image_meta, [ 665.106205] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 665.106205] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 665.106205] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 665.106205] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] vm_ref = self.build_virtual_machine(instance, [ 665.106565] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 665.106565] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] vif_infos = vmwarevif.get_vif_info(self._session, [ 665.106565] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 665.106565] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] for vif in network_info: [ 665.106565] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 665.106565] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] return self._sync_wrapper(fn, *args, **kwargs) [ 665.106565] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 665.106565] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] self.wait() [ 665.106565] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 665.106565] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] self[:] = self._gt.wait() [ 665.106565] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 665.106565] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] return self._exit_event.wait() [ 665.106565] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 665.106922] env[62109]: ERROR 
nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] result = hub.switch() [ 665.106922] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 665.106922] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] return self.greenlet.switch() [ 665.106922] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 665.106922] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] result = function(*args, **kwargs) [ 665.106922] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 665.106922] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] return func(*args, **kwargs) [ 665.106922] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 665.106922] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] raise e [ 665.106922] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 665.106922] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] nwinfo = self.network_api.allocate_for_instance( [ 665.106922] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 665.106922] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] created_port_ids = self._update_ports_for_instance( [ 665.107294] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 665.107294] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] with excutils.save_and_reraise_exception(): [ 665.107294] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.107294] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] self.force_reraise() [ 665.107294] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.107294] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] raise self.value [ 665.107294] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 665.107294] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] updated_port = self._update_port( [ 665.107294] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 665.107294] 
env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] _ensure_no_port_binding_failure(port) [ 665.107294] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 665.107294] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] raise exception.PortBindingFailed(port_id=port['id']) [ 665.107628] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] nova.exception.PortBindingFailed: Binding failed for port 181c75f9-e3b3-4820-9c7c-f3363ff5d514, please check neutron logs for more information. [ 665.107628] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] [ 665.107628] env[62109]: INFO nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Terminating instance [ 665.108836] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Acquiring lock "refresh_cache-7ef14775-9be3-4275-b5ca-dad43aa61dd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.397270] env[62109]: DEBUG nova.network.neutron [req-7a5ef03f-801d-4038-875b-f5848df2a1c7 req-efca65a7-dd5f-4ca8-82e2-8e6e1c63d0f8 service nova] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 665.448527] env[62109]: DEBUG oslo_vmware.rw_handles [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Completed reading data from the image iterator. {{(pid=62109) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 665.448772] env[62109]: DEBUG oslo_vmware.rw_handles [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 665.456182] env[62109]: DEBUG nova.network.neutron [req-7a5ef03f-801d-4038-875b-f5848df2a1c7 req-efca65a7-dd5f-4ca8-82e2-8e6e1c63d0f8 service nova] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.581856] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Downloaded image file data 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 to vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk on the data store datastore1 {{(pid=62109) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 665.584095] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Caching image {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 665.584326] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Copying Virtual Disk [datastore1] vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk to [datastore1] vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 665.584883] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77e10f6d-0a36-41c7-b113-ba59b98d8936 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.588094] env[62109]: DEBUG nova.network.neutron [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 665.596937] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 665.596937] env[62109]: value = "task-1116124" [ 665.596937] env[62109]: _type = "Task" [ 665.596937] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.608895] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116124, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.645278] env[62109]: DEBUG nova.network.neutron [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.959267] env[62109]: DEBUG oslo_concurrency.lockutils [req-7a5ef03f-801d-4038-875b-f5848df2a1c7 req-efca65a7-dd5f-4ca8-82e2-8e6e1c63d0f8 service nova] Releasing lock "refresh_cache-7ef14775-9be3-4275-b5ca-dad43aa61dd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.959753] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Acquired lock "refresh_cache-7ef14775-9be3-4275-b5ca-dad43aa61dd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.959950] env[62109]: DEBUG nova.network.neutron [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 666.109429] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116124, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.149610] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Releasing lock "refresh_cache-e50019d2-d9a1-4077-ba1a-7f7bde266058" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.149884] env[62109]: DEBUG nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 666.150098] env[62109]: DEBUG nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 666.150340] env[62109]: DEBUG nova.network.neutron [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 666.171837] env[62109]: DEBUG nova.network.neutron [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 666.476490] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9693a613-b5e7-4efc-a372-dbdb62a7bfa9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.480151] env[62109]: DEBUG nova.network.neutron [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 666.487445] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a7337b-10c9-4b5d-8520-8d213cfd2fb2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.520044] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff289e1-4669-4657-bf67-47ea302331e7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.527988] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2a274e-2995-49e0-8e05-6ae9c3f58bad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.532870] env[62109]: DEBUG nova.network.neutron [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.541958] env[62109]: DEBUG nova.compute.provider_tree [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 666.611736] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116124, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73353} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.612120] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Copied Virtual Disk [datastore1] vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk to [datastore1] vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 666.612238] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Deleting the datastore file [datastore1] vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/tmp-sparse.vmdk {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 666.612499] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-048b227f-39ec-45a1-bd84-ca391bba2d61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.619741] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 666.619741] env[62109]: value = "task-1116125" [ 666.619741] env[62109]: _type = "Task" [ 666.619741] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.628681] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116125, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.674734] env[62109]: DEBUG nova.network.neutron [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.971447] env[62109]: DEBUG nova.compute.manager [req-146a7366-8dfe-4ec5-a7e3-e15fe5cecad8 req-55fa4fb3-dbc7-4c67-a1d0-73f3b479b33b service nova] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Received event network-vif-deleted-181c75f9-e3b3-4820-9c7c-f3363ff5d514 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 667.043955] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Releasing lock "refresh_cache-7ef14775-9be3-4275-b5ca-dad43aa61dd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.044588] env[62109]: DEBUG nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 667.044785] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 667.047789] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4dc6d6e4-f239-4db2-8526-abef47775d56 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.055097] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Acquiring lock "a24f2349-7c1b-441d-a36e-b16dd61f6031" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.055097] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Lock "a24f2349-7c1b-441d-a36e-b16dd61f6031" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.060921] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e94bc2-6303-4456-8615-a8bd3fcf5396 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.072627] env[62109]: ERROR nova.scheduler.client.report [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf 
tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [req-05ce7747-5739-4691-b5ef-58202d5bcd01] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-05ce7747-5739-4691-b5ef-58202d5bcd01"}]} [ 667.089373] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7ef14775-9be3-4275-b5ca-dad43aa61dd3 could not be found. [ 667.089600] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 667.089832] env[62109]: INFO nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 667.090128] env[62109]: DEBUG oslo.service.loopingcall [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 667.091202] env[62109]: DEBUG nova.scheduler.client.report [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 667.093158] env[62109]: DEBUG nova.compute.manager [-] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 667.093276] env[62109]: DEBUG nova.network.neutron [-] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 667.109587] env[62109]: DEBUG nova.network.neutron [-] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 667.114563] env[62109]: DEBUG nova.scheduler.client.report [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 667.114563] env[62109]: DEBUG nova.compute.provider_tree [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 667.131231] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116125, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.027297} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.132133] env[62109]: DEBUG nova.scheduler.client.report [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 667.134058] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 667.134339] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Moving file from [datastore1] vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 to [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8. 
{{(pid=62109) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 667.135777] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-7ff5654f-f8b8-42e1-9df2-cc4add0af20e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.142344] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 667.142344] env[62109]: value = "task-1116126" [ 667.142344] env[62109]: _type = "Task" [ 667.142344] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.151180] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116126, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.153666] env[62109]: DEBUG nova.scheduler.client.report [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 667.177146] env[62109]: INFO nova.compute.manager [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] [instance: e50019d2-d9a1-4077-ba1a-7f7bde266058] Took 1.03 seconds to deallocate network for instance. 
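The 409 from Placement a few records back (req-05ce7747, "resource provider generation conflict") and the later refresh that moves the provider generation from 60 to 61 show Placement's optimistic concurrency: every inventory write must carry the provider generation the writer last read, and a stale generation is rejected so the writer refreshes and tries again. The Python sketch below is a self-contained simulation of that retry pattern only; FakePlacement and set_inventory_with_retry are illustrative stand-ins, not the real Placement API or Nova's report client.

# Minimal simulation of the generation-guarded inventory write seen above:
# a write must echo the generation it read, a stale generation is rejected,
# and the writer refreshes and retries. Illustration only.

class ConcurrentUpdate(Exception):
    """Stands in for a 409 placement.concurrent_update response."""


class FakePlacement:
    def __init__(self):
        self.generation = 60          # matches the generation visible in the log
        self.inventories = {}

    def get_inventories(self, provider_uuid):
        # Return the current inventory together with the provider generation;
        # the caller must send that generation back on the next write.
        return self.generation, dict(self.inventories)

    def put_inventories(self, provider_uuid, generation, inventories):
        if generation != self.generation:
            # Someone else updated the provider first: reject the stale write.
            raise ConcurrentUpdate("resource provider generation conflict")
        self.inventories = dict(inventories)
        self.generation += 1          # 60 -> 61, as in the records above
        return self.generation


def set_inventory_with_retry(client, provider_uuid, new_inventories, attempts=3):
    """Read the generation, write, and on a conflict refresh and try again."""
    for _ in range(attempts):
        generation, _current = client.get_inventories(provider_uuid)
        try:
            return client.put_inventories(provider_uuid, generation, new_inventories)
        except ConcurrentUpdate:
            continue  # refresh on the next loop iteration, then retry the write
    raise ConcurrentUpdate("gave up after %d attempts" % attempts)


if __name__ == "__main__":
    placement = FakePlacement()
    inventory = {
        "VCPU": {"total": 48, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512},
        "DISK_GB": {"total": 400},
    }
    # Prints 61: the write succeeds and bumps the provider generation by one.
    print(set_inventory_with_retry(placement, "574e9717-c25e-453d-8028-45d9e2f95398", inventory))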
[ 667.534803] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bee7ef2-b261-4fac-b5a1-e1651a45425b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.542648] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b46bd7-38ed-433b-ad1e-cc8b2a00f894 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.573027] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41eee97-cf15-429d-aa66-67484b9b9598 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.581343] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd1f8c5-9413-4f7a-83d1-d9f8d5b99bea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.598469] env[62109]: DEBUG nova.compute.provider_tree [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 667.612085] env[62109]: DEBUG nova.network.neutron [-] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.652258] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116126, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.028792} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.652521] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] File moved {{(pid=62109) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 667.652714] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Cleaning up location [datastore1] vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 667.652876] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Deleting the datastore file [datastore1] vmware_temp/46ba7cb3-cec9-4d36-a617-94725e69d687 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 667.653145] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed1d582f-f12d-48f4-998a-bd4dc6a164a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.663094] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 667.663094] env[62109]: value = "task-1116127" [ 667.663094] env[62109]: _type = "Task" [ 667.663094] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.674074] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116127, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.114616] env[62109]: INFO nova.compute.manager [-] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Took 1.02 seconds to deallocate network for instance. 
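The CopyVirtualDisk_Task, DeleteDatastoreFile_Task and MoveDatastoreFile_Task records in this stretch all follow the same shape: submit the task, poll it ("progress is 0%", later "progress is 89%"), then read the result once it reports completed successfully with a duration. A minimal, generic polling loop is sketched below; get_task_info is a hypothetical callable standing in for the task service, and this is not oslo.vmware's wait_for_task implementation.

import time

# States a vSphere-style task can report; the names mirror vCenter's
# queued/running/success/error vocabulary, but the helper is hypothetical.
QUEUED, RUNNING, SUCCESS, ERROR = "queued", "running", "success", "error"


class TaskFailed(Exception):
    pass


def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
    """Poll a long-running task until it finishes or the timeout expires.

    get_task_info(task_id) is assumed to return (state, progress, error_msg),
    the same information the records above surface as a progress percentage
    followed by a completion (or failure) message.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error_msg = get_task_info(task_id)
        if state == SUCCESS:
            return progress                       # e.g. 100
        if state == ERROR:
            raise TaskFailed(error_msg or "task %s failed" % task_id)
        # Still queued or running: wait and poll again, like the repeated
        # "progress is 0%" / "progress is 89%" lines in this log.
        time.sleep(interval)
    raise TaskFailed("timed out waiting for task %s" % task_id)


if __name__ == "__main__":
    # Canned sequence standing in for the real task service.
    states = iter([(RUNNING, 0, None), (RUNNING, 89, None), (SUCCESS, 100, None)])
    print(wait_for_task(lambda _tid: next(states), "task-1116128", interval=0.01))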
[ 668.116991] env[62109]: DEBUG nova.compute.claims [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 668.117183] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.131416] env[62109]: DEBUG nova.scheduler.client.report [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 60 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 668.131679] env[62109]: DEBUG nova.compute.provider_tree [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 60 to 61 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 668.131856] env[62109]: DEBUG nova.compute.provider_tree [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 668.173543] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116127, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.045497} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.173738] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 668.174475] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f55f071f-a8e7-4ea6-b8e9-893a18588571 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.180802] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 668.180802] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525ccd62-60b6-956e-4676-842578a5c8fc" [ 668.180802] env[62109]: _type = "Task" [ 668.180802] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.191250] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525ccd62-60b6-956e-4676-842578a5c8fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.210869] env[62109]: INFO nova.scheduler.client.report [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Deleted allocations for instance e50019d2-d9a1-4077-ba1a-7f7bde266058 [ 668.637472] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.585s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.638100] env[62109]: DEBUG nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 668.641244] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 20.878s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.641399] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.641544] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 668.641824] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.795s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.645737] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bbe758-4001-4693-a4a7-db7d0723c572 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.655116] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e3cc9b-cb58-41b5-8d5a-de8755252345 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.670850] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ed4b16-0ccb-447d-90da-48a12eb25a95 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.679203] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22fe0bd0-72e9-4b4b-9403-a742309819cd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.692349] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525ccd62-60b6-956e-4676-842578a5c8fc, 'name': SearchDatastore_Task, 'duration_secs': 0.012441} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.717894] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.718208] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1/934e3a1b-8d3f-4de0-ae8b-35b82d3859a1.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 668.718925] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181581MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 668.719078] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.719581] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ec91135-2e01-400b-b22e-93000283a922 tempest-DeleteServersAdminTestJSON-1833947483 tempest-DeleteServersAdminTestJSON-1833947483-project-member] Lock "e50019d2-d9a1-4077-ba1a-7f7bde266058" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.647s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.719761] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01fecb52-3353-4c28-8fb3-ec75248a08bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.728904] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 668.728904] env[62109]: value = "task-1116128" [ 668.728904] env[62109]: _type = "Task" [ 668.728904] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.737952] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116128, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.147226] env[62109]: DEBUG nova.compute.utils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 669.148974] env[62109]: DEBUG nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 669.149612] env[62109]: DEBUG nova.network.neutron [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 669.206937] env[62109]: DEBUG nova.policy [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba93b86833f34307b0c61fcf7ad1cea8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9ced540df806498ab4f5865f0bcc508d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 669.226397] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 669.247410] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116128, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.623813] env[62109]: DEBUG nova.network.neutron [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Successfully created port: c1a7f1e1-bc39-4b71-a227-069264e706de {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 669.653277] env[62109]: DEBUG nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 669.710345] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08fdad25-6d4f-4858-8e1a-1f5b69577c76 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.719099] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04fb7e8c-4984-49fa-b7c7-fbdf330129e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.757090] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226a1baa-2854-45b6-8746-8693986ba9c5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.769877] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116128, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528305} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.770175] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1/934e3a1b-8d3f-4de0-ae8b-35b82d3859a1.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 669.770389] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 669.771815] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aefae0b0-5987-4d8b-a79d-aa948e712d8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.776625] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.776868] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f30ce95f-ed66-4991-954e-d4f4bb6b32f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.790854] env[62109]: DEBUG nova.compute.provider_tree [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.793401] env[62109]: DEBUG 
oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 669.793401] env[62109]: value = "task-1116129" [ 669.793401] env[62109]: _type = "Task" [ 669.793401] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.803104] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116129, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.295493] env[62109]: DEBUG nova.scheduler.client.report [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 670.309140] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116129, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.158299} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.311526] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 670.311526] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f09b390-a083-44cd-97d3-8987b8095d98 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.335353] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1/934e3a1b-8d3f-4de0-ae8b-35b82d3859a1.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 670.336089] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35b2d318-e5c3-4d57-a100-dfe761407857 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.358402] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 670.358402] env[62109]: value = "task-1116130" [ 670.358402] env[62109]: _type = "Task" [ 670.358402] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.367710] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116130, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.562625] env[62109]: DEBUG nova.compute.manager [req-8d52c763-4702-4a7c-9c8f-11c972989cb6 req-d7072684-af45-419c-96e9-8a68d0a0f32d service nova] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Received event network-changed-c1a7f1e1-bc39-4b71-a227-069264e706de {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 670.562841] env[62109]: DEBUG nova.compute.manager [req-8d52c763-4702-4a7c-9c8f-11c972989cb6 req-d7072684-af45-419c-96e9-8a68d0a0f32d service nova] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Refreshing instance network info cache due to event network-changed-c1a7f1e1-bc39-4b71-a227-069264e706de. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 670.563025] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d52c763-4702-4a7c-9c8f-11c972989cb6 req-d7072684-af45-419c-96e9-8a68d0a0f32d service nova] Acquiring lock "refresh_cache-17fd1633-f327-47e9-905f-60c8c7446c7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.563200] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d52c763-4702-4a7c-9c8f-11c972989cb6 req-d7072684-af45-419c-96e9-8a68d0a0f32d service nova] Acquired lock "refresh_cache-17fd1633-f327-47e9-905f-60c8c7446c7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.563316] env[62109]: DEBUG nova.network.neutron [req-8d52c763-4702-4a7c-9c8f-11c972989cb6 req-d7072684-af45-419c-96e9-8a68d0a0f32d service nova] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Refreshing network info cache for port c1a7f1e1-bc39-4b71-a227-069264e706de {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 670.657349] env[62109]: ERROR nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c1a7f1e1-bc39-4b71-a227-069264e706de, please check neutron logs for more information. [ 670.657349] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 670.657349] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.657349] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 670.657349] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 670.657349] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 670.657349] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 670.657349] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 670.657349] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.657349] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 670.657349] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.657349] env[62109]: ERROR nova.compute.manager raise self.value [ 670.657349] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 670.657349] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 670.657349] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.657349] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 670.658032] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 670.658032] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 670.658032] env[62109]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port c1a7f1e1-bc39-4b71-a227-069264e706de, please check neutron logs for more information. [ 670.658032] env[62109]: ERROR nova.compute.manager [ 670.658032] env[62109]: Traceback (most recent call last): [ 670.658032] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 670.658032] env[62109]: listener.cb(fileno) [ 670.658032] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 670.658032] env[62109]: result = function(*args, **kwargs) [ 670.658032] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 670.658032] env[62109]: return func(*args, **kwargs) [ 670.658032] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 670.658032] env[62109]: raise e [ 670.658032] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.658032] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 670.658032] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 670.658032] env[62109]: created_port_ids = self._update_ports_for_instance( [ 670.658032] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 670.658032] env[62109]: with excutils.save_and_reraise_exception(): [ 670.658032] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.658032] env[62109]: self.force_reraise() [ 670.658032] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.658032] env[62109]: raise self.value [ 670.658032] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 670.658032] env[62109]: updated_port = self._update_port( [ 670.658032] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.658032] env[62109]: _ensure_no_port_binding_failure(port) [ 670.658032] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 670.658032] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 670.658864] env[62109]: nova.exception.PortBindingFailed: Binding failed for port c1a7f1e1-bc39-4b71-a227-069264e706de, please check neutron logs for more information. [ 670.658864] env[62109]: Removing descriptor: 18 [ 670.665871] env[62109]: DEBUG nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 670.699024] env[62109]: DEBUG nova.virt.hardware [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 670.699024] env[62109]: DEBUG nova.virt.hardware [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 670.699024] env[62109]: DEBUG nova.virt.hardware [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 670.699207] env[62109]: DEBUG nova.virt.hardware [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 670.699207] env[62109]: DEBUG nova.virt.hardware [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 670.699326] env[62109]: DEBUG nova.virt.hardware [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 670.699740] env[62109]: DEBUG nova.virt.hardware [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 670.700151] env[62109]: DEBUG nova.virt.hardware [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 670.700466] env[62109]: DEBUG nova.virt.hardware [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 670.701478] env[62109]: DEBUG nova.virt.hardware [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 670.701478] env[62109]: DEBUG nova.virt.hardware [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 670.702629] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1a1783-7e23-4429-8804-87d172126ccd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.715472] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9dd40f-2f66-42b0-8e0e-84fbf015c7a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.735612] env[62109]: ERROR nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c1a7f1e1-bc39-4b71-a227-069264e706de, please check neutron logs for more information. 
[ 670.735612] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Traceback (most recent call last): [ 670.735612] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 670.735612] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] yield resources [ 670.735612] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 670.735612] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] self.driver.spawn(context, instance, image_meta, [ 670.735612] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 670.735612] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 670.735612] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 670.735612] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] vm_ref = self.build_virtual_machine(instance, [ 670.735612] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 670.736099] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] vif_infos = vmwarevif.get_vif_info(self._session, [ 670.736099] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 670.736099] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] for vif in network_info: [ 670.736099] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 670.736099] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] return self._sync_wrapper(fn, *args, **kwargs) [ 670.736099] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 670.736099] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] self.wait() [ 670.736099] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 670.736099] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] self[:] = self._gt.wait() [ 670.736099] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 670.736099] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] return self._exit_event.wait() [ 670.736099] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 670.736099] env[62109]: ERROR 
nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] current.throw(*self._exc) [ 670.737524] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 670.737524] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] result = function(*args, **kwargs) [ 670.737524] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 670.737524] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] return func(*args, **kwargs) [ 670.737524] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 670.737524] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] raise e [ 670.737524] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.737524] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] nwinfo = self.network_api.allocate_for_instance( [ 670.737524] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 670.737524] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] created_port_ids = self._update_ports_for_instance( [ 670.737524] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 670.737524] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] with excutils.save_and_reraise_exception(): [ 670.737524] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.737927] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] self.force_reraise() [ 670.737927] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.737927] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] raise self.value [ 670.737927] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 670.737927] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] updated_port = self._update_port( [ 670.737927] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.737927] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] _ensure_no_port_binding_failure(port) [ 670.737927] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
670.737927] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] raise exception.PortBindingFailed(port_id=port['id']) [ 670.737927] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] nova.exception.PortBindingFailed: Binding failed for port c1a7f1e1-bc39-4b71-a227-069264e706de, please check neutron logs for more information. [ 670.737927] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] [ 670.737927] env[62109]: INFO nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Terminating instance [ 670.738281] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Acquiring lock "refresh_cache-17fd1633-f327-47e9-905f-60c8c7446c7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.804999] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.163s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.805717] env[62109]: ERROR nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a4fdec54-dd81-4e7f-a2a1-876cd3012614, please check neutron logs for more information. 
[ 670.805717] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Traceback (most recent call last): [ 670.805717] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 670.805717] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] self.driver.spawn(context, instance, image_meta, [ 670.805717] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 670.805717] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 670.805717] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 670.805717] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] vm_ref = self.build_virtual_machine(instance, [ 670.805717] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 670.805717] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] vif_infos = vmwarevif.get_vif_info(self._session, [ 670.805717] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 670.806079] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] for vif in network_info: [ 670.806079] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 670.806079] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] return self._sync_wrapper(fn, *args, **kwargs) [ 670.806079] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 670.806079] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] self.wait() [ 670.806079] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 670.806079] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] self[:] = self._gt.wait() [ 670.806079] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 670.806079] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] return self._exit_event.wait() [ 670.806079] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 670.806079] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] result = hub.switch() [ 670.806079] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
670.806079] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] return self.greenlet.switch() [ 670.806391] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 670.806391] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] result = function(*args, **kwargs) [ 670.806391] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 670.806391] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] return func(*args, **kwargs) [ 670.806391] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 670.806391] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] raise e [ 670.806391] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.806391] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] nwinfo = self.network_api.allocate_for_instance( [ 670.806391] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 670.806391] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] created_port_ids = self._update_ports_for_instance( [ 670.806391] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 670.806391] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] with excutils.save_and_reraise_exception(): [ 670.806391] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.806795] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] self.force_reraise() [ 670.806795] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.806795] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] raise self.value [ 670.806795] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 670.806795] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] updated_port = self._update_port( [ 670.806795] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.806795] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] _ensure_no_port_binding_failure(port) [ 670.806795] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 670.806795] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] raise exception.PortBindingFailed(port_id=port['id']) [ 670.806795] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] nova.exception.PortBindingFailed: Binding failed for port a4fdec54-dd81-4e7f-a2a1-876cd3012614, please check neutron logs for more information. [ 670.806795] env[62109]: ERROR nova.compute.manager [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] [ 670.807068] env[62109]: DEBUG nova.compute.utils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Binding failed for port a4fdec54-dd81-4e7f-a2a1-876cd3012614, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 670.810447] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.309s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.811643] env[62109]: DEBUG nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Build of instance 590e6f9a-b764-44b4-9117-3deff696a6aa was re-scheduled: Binding failed for port a4fdec54-dd81-4e7f-a2a1-876cd3012614, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 670.811643] env[62109]: DEBUG nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 670.811643] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "refresh_cache-590e6f9a-b764-44b4-9117-3deff696a6aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.811643] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "refresh_cache-590e6f9a-b764-44b4-9117-3deff696a6aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.811643] env[62109]: DEBUG nova.network.neutron [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 670.873418] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116130, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.088550] env[62109]: DEBUG nova.network.neutron [req-8d52c763-4702-4a7c-9c8f-11c972989cb6 req-d7072684-af45-419c-96e9-8a68d0a0f32d service nova] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 671.168144] env[62109]: DEBUG nova.network.neutron [req-8d52c763-4702-4a7c-9c8f-11c972989cb6 req-d7072684-af45-419c-96e9-8a68d0a0f32d service nova] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.350183] env[62109]: DEBUG nova.network.neutron [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 671.376254] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116130, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.488862] env[62109]: DEBUG nova.network.neutron [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.672896] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d52c763-4702-4a7c-9c8f-11c972989cb6 req-d7072684-af45-419c-96e9-8a68d0a0f32d service nova] Releasing lock "refresh_cache-17fd1633-f327-47e9-905f-60c8c7446c7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.673509] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Acquired lock "refresh_cache-17fd1633-f327-47e9-905f-60c8c7446c7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.673700] env[62109]: DEBUG nova.network.neutron [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 671.725552] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d82f1714-e2c8-40a7-9452-2c42661dd151 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.741023] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11fe96f-5cd9-49ba-8ad7-f62b46f91d2f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.781131] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf447a56-9796-40f9-bb3c-210509c9df7e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.788828] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763c9733-8657-40b9-b16d-76b2807940d1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.806823] env[62109]: DEBUG nova.compute.provider_tree [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.872706] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116130, 'name': ReconfigVM_Task, 'duration_secs': 1.271443} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.873030] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1/934e3a1b-8d3f-4de0-ae8b-35b82d3859a1.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 671.879341] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24687be9-eda0-4d46-8251-96f8e88b9cb6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.883848] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 671.883848] env[62109]: value = "task-1116131" [ 671.883848] env[62109]: _type = "Task" [ 671.883848] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.891575] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116131, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.994609] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "refresh_cache-590e6f9a-b764-44b4-9117-3deff696a6aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.994882] env[62109]: DEBUG nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 671.995098] env[62109]: DEBUG nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 671.995310] env[62109]: DEBUG nova.network.neutron [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 672.015424] env[62109]: DEBUG nova.network.neutron [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 672.209473] env[62109]: DEBUG nova.network.neutron [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 672.307909] env[62109]: DEBUG nova.scheduler.client.report [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 672.348080] env[62109]: DEBUG nova.network.neutron [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.394426] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116131, 'name': Rename_Task, 'duration_secs': 0.137976} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.394768] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 672.395038] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11d9dba8-406b-4453-a194-66935b563cb7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.401779] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 672.401779] env[62109]: value = "task-1116132" [ 672.401779] env[62109]: _type = "Task" [ 672.401779] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.518436] env[62109]: DEBUG nova.network.neutron [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.630456] env[62109]: DEBUG nova.compute.manager [req-1e9bd8bd-ecbf-47b1-aeff-4beab05d6e8a req-d6d76979-5137-4197-b1c7-227fa2533dbd service nova] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Received event network-vif-deleted-c1a7f1e1-bc39-4b71-a227-069264e706de {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 672.813552] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.006s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.815650] env[62109]: ERROR nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 49919bfd-f918-4e38-ba86-940a0ce0728a, please check neutron logs for more information. [ 672.815650] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Traceback (most recent call last): [ 672.815650] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 672.815650] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] self.driver.spawn(context, instance, image_meta, [ 672.815650] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 672.815650] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.815650] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 672.815650] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] vm_ref = self.build_virtual_machine(instance, [ 672.815650] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 672.815650] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] vif_infos = vmwarevif.get_vif_info(self._session, [ 672.815650] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 672.816323] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] for vif in network_info: [ 672.816323] env[62109]: ERROR nova.compute.manager [instance: 
226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 672.816323] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] return self._sync_wrapper(fn, *args, **kwargs) [ 672.816323] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 672.816323] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] self.wait() [ 672.816323] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 672.816323] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] self[:] = self._gt.wait() [ 672.816323] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 672.816323] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] return self._exit_event.wait() [ 672.816323] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 672.816323] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] current.throw(*self._exc) [ 672.816323] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.816323] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] result = function(*args, **kwargs) [ 672.817148] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.817148] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] return func(*args, **kwargs) [ 672.817148] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.817148] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] raise e [ 672.817148] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.817148] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] nwinfo = self.network_api.allocate_for_instance( [ 672.817148] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.817148] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] created_port_ids = self._update_ports_for_instance( [ 672.817148] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.817148] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] with excutils.save_and_reraise_exception(): [ 672.817148] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.817148] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] self.force_reraise() [ 672.817148] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.817994] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] raise self.value [ 672.817994] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.817994] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] updated_port = self._update_port( [ 672.817994] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.817994] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] _ensure_no_port_binding_failure(port) [ 672.817994] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.817994] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] raise exception.PortBindingFailed(port_id=port['id']) [ 672.817994] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] nova.exception.PortBindingFailed: Binding failed for port 49919bfd-f918-4e38-ba86-940a0ce0728a, please check neutron logs for more information. [ 672.817994] env[62109]: ERROR nova.compute.manager [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] [ 672.817994] env[62109]: DEBUG nova.compute.utils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Binding failed for port 49919bfd-f918-4e38-ba86-940a0ce0728a, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 672.820596] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.873s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.821842] env[62109]: INFO nova.compute.claims [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 672.834387] env[62109]: DEBUG nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Build of instance 226938f5-f903-4671-b7a3-c6f5a264506e was re-scheduled: Binding failed for port 49919bfd-f918-4e38-ba86-940a0ce0728a, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 672.834957] env[62109]: DEBUG nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 672.836047] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "refresh_cache-226938f5-f903-4671-b7a3-c6f5a264506e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.836249] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "refresh_cache-226938f5-f903-4671-b7a3-c6f5a264506e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.836349] env[62109]: DEBUG nova.network.neutron [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 672.850308] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Releasing lock "refresh_cache-17fd1633-f327-47e9-905f-60c8c7446c7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.851404] env[62109]: DEBUG nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 672.851404] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 672.851404] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5bc4609-0d98-4b52-ba11-d781a7e8ddd4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.863443] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e622ce47-9158-4387-86e9-ee414facbdcf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.890657] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 17fd1633-f327-47e9-905f-60c8c7446c7e could not be found. [ 672.890748] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 672.890915] env[62109]: INFO nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 672.891177] env[62109]: DEBUG oslo.service.loopingcall [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 672.891404] env[62109]: DEBUG nova.compute.manager [-] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 672.891496] env[62109]: DEBUG nova.network.neutron [-] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 672.914936] env[62109]: DEBUG nova.network.neutron [-] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 672.919976] env[62109]: DEBUG oslo_vmware.api [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116132, 'name': PowerOnVM_Task, 'duration_secs': 0.464075} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.921027] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 672.921027] env[62109]: INFO nova.compute.manager [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Took 10.02 seconds to spawn the instance on the hypervisor. [ 672.921027] env[62109]: DEBUG nova.compute.manager [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 672.921483] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4a7be6-a007-4c5e-90b5-e896148c4dd8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.024256] env[62109]: INFO nova.compute.manager [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 590e6f9a-b764-44b4-9117-3deff696a6aa] Took 1.03 seconds to deallocate network for instance. [ 673.359550] env[62109]: DEBUG nova.network.neutron [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.421514] env[62109]: DEBUG nova.network.neutron [-] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.447557] env[62109]: INFO nova.compute.manager [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Took 33.02 seconds to build instance. [ 673.468224] env[62109]: DEBUG nova.network.neutron [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.924223] env[62109]: INFO nova.compute.manager [-] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Took 1.03 seconds to deallocate network for instance. 
[ 673.927470] env[62109]: DEBUG nova.compute.claims [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 673.927839] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.950289] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ab45b0b4-102c-44f4-bc48-ebfafdf71271 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Lock "934e3a1b-8d3f-4de0-ae8b-35b82d3859a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.515s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.970651] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "refresh_cache-226938f5-f903-4671-b7a3-c6f5a264506e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.970876] env[62109]: DEBUG nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 673.971106] env[62109]: DEBUG nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 673.971275] env[62109]: DEBUG nova.network.neutron [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 673.993637] env[62109]: DEBUG nova.network.neutron [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 674.064992] env[62109]: INFO nova.scheduler.client.report [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleted allocations for instance 590e6f9a-b764-44b4-9117-3deff696a6aa [ 674.110735] env[62109]: INFO nova.compute.manager [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Rebuilding instance [ 674.183664] env[62109]: DEBUG nova.compute.manager [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 674.186505] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1798396b-d7bd-4481-a078-17e5fffc6de5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.331528] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a191e43d-5529-494e-b5f5-7f14c3456afa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.339919] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd35bc0-00a7-4253-9c3c-4dd5195ccfb8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.379743] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ba1e7e-abb2-4975-8ea7-fbbd772435f4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.388944] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9151f3ce-1d01-445f-a3e1-9b83a3877738 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.403602] env[62109]: DEBUG nova.compute.provider_tree [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.459234] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 674.496590] env[62109]: DEBUG nova.network.neutron [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.574660] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c7489f13-257d-4911-86c2-c62976a694cc tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "590e6f9a-b764-44b4-9117-3deff696a6aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.799s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.699863] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 674.699987] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-16059eb6-97bd-4393-9722-990d12cafb91 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.707723] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 674.707723] env[62109]: value = "task-1116133" [ 674.707723] env[62109]: _type = "Task" [ 674.707723] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.716284] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116133, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.906510] env[62109]: DEBUG nova.scheduler.client.report [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 674.981742] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.003482] env[62109]: INFO nova.compute.manager [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 226938f5-f903-4671-b7a3-c6f5a264506e] Took 1.03 seconds to deallocate network for instance. [ 675.079766] env[62109]: DEBUG nova.compute.manager [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 675.222692] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116133, 'name': PowerOffVM_Task, 'duration_secs': 0.10965} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.222692] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 675.222692] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 675.223940] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04dde79f-a5e6-49a7-a375-eb1eb6780cda {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.231513] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 675.231513] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5d40eb2f-4c8c-46d3-9ec2-c581893eac08 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.257371] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 675.258133] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 675.258133] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Deleting the datastore file [datastore1] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 675.258417] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6347b79-1298-445d-8159-e631d899e7db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.265329] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 675.265329] env[62109]: value = "task-1116135" [ 675.265329] env[62109]: _type = "Task" [ 675.265329] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.277454] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116135, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.412291] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.592s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.412468] env[62109]: DEBUG nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 675.415208] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.260s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.604601] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.778493] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116135, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100544} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.778901] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 675.779057] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 675.780126] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 675.878386] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Acquiring lock "c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.878615] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Lock "c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.920520] env[62109]: DEBUG nova.compute.utils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 675.930195] env[62109]: DEBUG nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 675.930440] env[62109]: DEBUG nova.network.neutron [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 675.997582] env[62109]: DEBUG nova.policy [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a65ab55ee66140f2a825e4347258d12d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57a1783401e34096b84023fc70da3840', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 676.039057] env[62109]: INFO nova.scheduler.client.report [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleted allocations for instance 226938f5-f903-4671-b7a3-c6f5a264506e [ 676.422129] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea954ed5-5379-4a6e-a67c-74af0f780497 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.432297] env[62109]: DEBUG nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 676.435464] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03503c7b-7647-4cd4-9686-dd659eefb90e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.470862] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce021d1f-808d-4358-9fc5-5145a95f1e10 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.483882] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521357e4-5dd3-4a7c-8cc6-2ecd3089a512 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.502534] env[62109]: DEBUG nova.compute.provider_tree [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.543177] env[62109]: DEBUG nova.network.neutron [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Successfully created port: acb8c8a9-4641-44ff-a89f-f7d5f382b821 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 676.552798] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c261603-35fd-429d-bb04-cc304bf7533b tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "226938f5-f903-4671-b7a3-c6f5a264506e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.172s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.830511] env[62109]: DEBUG nova.virt.hardware [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 676.830511] env[62109]: DEBUG nova.virt.hardware [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 676.830511] env[62109]: 
DEBUG nova.virt.hardware [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 676.831193] env[62109]: DEBUG nova.virt.hardware [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 676.831457] env[62109]: DEBUG nova.virt.hardware [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 676.831724] env[62109]: DEBUG nova.virt.hardware [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 676.832065] env[62109]: DEBUG nova.virt.hardware [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 676.832349] env[62109]: DEBUG nova.virt.hardware [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 676.832629] env[62109]: DEBUG nova.virt.hardware [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 676.832895] env[62109]: DEBUG nova.virt.hardware [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 676.833266] env[62109]: DEBUG nova.virt.hardware [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 676.834258] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec36e11-e743-40d5-bcf7-6fb2d6c90ea8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.851020] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5ade2c-fae0-47eb-ad4a-bfe7d2e93d12 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.866259] env[62109]: DEBUG nova.virt.vmwareapi.vmops 
[None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 676.871903] env[62109]: DEBUG oslo.service.loopingcall [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 676.872219] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 676.872547] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38396fd2-9af7-4cac-8771-f23da8d31d1b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.891745] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 676.891745] env[62109]: value = "task-1116136" [ 676.891745] env[62109]: _type = "Task" [ 676.891745] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.912332] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116136, 'name': CreateVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.005742] env[62109]: DEBUG nova.scheduler.client.report [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 677.058758] env[62109]: DEBUG nova.compute.manager [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 677.074426] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "c5c63ece-611d-45d1-a8e6-9327700f1563" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.074646] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "c5c63ece-611d-45d1-a8e6-9327700f1563" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.402302] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116136, 'name': CreateVM_Task, 'duration_secs': 0.334359} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.402488] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 677.402916] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.403179] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.403899] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 677.404186] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6d17859-25f9-4bc8-9a25-05d42f930f37 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.409124] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 677.409124] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52dd9ae1-d75c-d69f-655f-1c6a7bd8c53f" [ 677.409124] env[62109]: _type = "Task" [ 677.409124] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.417101] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52dd9ae1-d75c-d69f-655f-1c6a7bd8c53f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.445728] env[62109]: DEBUG nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 677.472012] env[62109]: DEBUG nova.virt.hardware [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 677.472278] env[62109]: DEBUG nova.virt.hardware [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 677.472436] env[62109]: DEBUG nova.virt.hardware [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 677.472608] env[62109]: DEBUG nova.virt.hardware [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 677.472750] env[62109]: DEBUG nova.virt.hardware [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 677.472893] env[62109]: DEBUG nova.virt.hardware [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 677.473112] env[62109]: DEBUG nova.virt.hardware [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 677.473271] env[62109]: DEBUG nova.virt.hardware [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 677.473434] env[62109]: DEBUG nova.virt.hardware [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 677.473593] env[62109]: DEBUG nova.virt.hardware [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 677.473759] env[62109]: DEBUG nova.virt.hardware [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 677.474686] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8df0dd-d11a-4521-8107-b7ca51a12e02 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.482795] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2096ec4-b4e7-4235-b910-d538aad6afad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.512811] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.097s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.513470] env[62109]: ERROR nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8ef102f4-e9bf-4498-8590-632fb7e51a15, please check neutron logs for more information. 
[ 677.513470] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] Traceback (most recent call last): [ 677.513470] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 677.513470] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] self.driver.spawn(context, instance, image_meta, [ 677.513470] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 677.513470] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 677.513470] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 677.513470] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] vm_ref = self.build_virtual_machine(instance, [ 677.513470] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 677.513470] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] vif_infos = vmwarevif.get_vif_info(self._session, [ 677.513470] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 677.513775] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] for vif in network_info: [ 677.513775] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 677.513775] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] return self._sync_wrapper(fn, *args, **kwargs) [ 677.513775] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 677.513775] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] self.wait() [ 677.513775] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 677.513775] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] self[:] = self._gt.wait() [ 677.513775] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 677.513775] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] return self._exit_event.wait() [ 677.513775] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 677.513775] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] result = hub.switch() [ 677.513775] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
677.513775] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] return self.greenlet.switch() [ 677.514094] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 677.514094] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] result = function(*args, **kwargs) [ 677.514094] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 677.514094] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] return func(*args, **kwargs) [ 677.514094] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 677.514094] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] raise e [ 677.514094] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.514094] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] nwinfo = self.network_api.allocate_for_instance( [ 677.514094] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 677.514094] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] created_port_ids = self._update_ports_for_instance( [ 677.514094] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 677.514094] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] with excutils.save_and_reraise_exception(): [ 677.514094] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.514428] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] self.force_reraise() [ 677.514428] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.514428] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] raise self.value [ 677.514428] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 677.514428] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] updated_port = self._update_port( [ 677.514428] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.514428] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] _ensure_no_port_binding_failure(port) [ 677.514428] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 677.514428] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] raise exception.PortBindingFailed(port_id=port['id']) [ 677.514428] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] nova.exception.PortBindingFailed: Binding failed for port 8ef102f4-e9bf-4498-8590-632fb7e51a15, please check neutron logs for more information. [ 677.514428] env[62109]: ERROR nova.compute.manager [instance: f5f24014-2196-4c44-b947-a80ac75197de] [ 677.514750] env[62109]: DEBUG nova.compute.utils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Binding failed for port 8ef102f4-e9bf-4498-8590-632fb7e51a15, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 677.515625] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.970s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.518612] env[62109]: INFO nova.compute.claims [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 677.519889] env[62109]: DEBUG nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Build of instance f5f24014-2196-4c44-b947-a80ac75197de was re-scheduled: Binding failed for port 8ef102f4-e9bf-4498-8590-632fb7e51a15, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 677.521105] env[62109]: DEBUG nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 677.521105] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "refresh_cache-f5f24014-2196-4c44-b947-a80ac75197de" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.521105] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "refresh_cache-f5f24014-2196-4c44-b947-a80ac75197de" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.521105] env[62109]: DEBUG nova.network.neutron [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 677.581802] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.767237] env[62109]: ERROR nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port acb8c8a9-4641-44ff-a89f-f7d5f382b821, please check neutron logs for more information. 
[ 677.767237] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 677.767237] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.767237] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 677.767237] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 677.767237] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 677.767237] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 677.767237] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 677.767237] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.767237] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 677.767237] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.767237] env[62109]: ERROR nova.compute.manager raise self.value [ 677.767237] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 677.767237] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 677.767237] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.767237] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 677.767740] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 677.767740] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 677.767740] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port acb8c8a9-4641-44ff-a89f-f7d5f382b821, please check neutron logs for more information. 
[ 677.767740] env[62109]: ERROR nova.compute.manager [ 677.767740] env[62109]: Traceback (most recent call last): [ 677.767740] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 677.767740] env[62109]: listener.cb(fileno) [ 677.767740] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 677.767740] env[62109]: result = function(*args, **kwargs) [ 677.767740] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 677.767740] env[62109]: return func(*args, **kwargs) [ 677.767740] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 677.767740] env[62109]: raise e [ 677.767740] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.767740] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 677.767740] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 677.767740] env[62109]: created_port_ids = self._update_ports_for_instance( [ 677.767740] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 677.767740] env[62109]: with excutils.save_and_reraise_exception(): [ 677.767740] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.767740] env[62109]: self.force_reraise() [ 677.767740] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.767740] env[62109]: raise self.value [ 677.767740] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 677.767740] env[62109]: updated_port = self._update_port( [ 677.767740] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.767740] env[62109]: _ensure_no_port_binding_failure(port) [ 677.767740] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 677.767740] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 677.768647] env[62109]: nova.exception.PortBindingFailed: Binding failed for port acb8c8a9-4641-44ff-a89f-f7d5f382b821, please check neutron logs for more information. [ 677.768647] env[62109]: Removing descriptor: 18 [ 677.769033] env[62109]: ERROR nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port acb8c8a9-4641-44ff-a89f-f7d5f382b821, please check neutron logs for more information. 
[ 677.769033] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Traceback (most recent call last): [ 677.769033] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 677.769033] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] yield resources [ 677.769033] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 677.769033] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] self.driver.spawn(context, instance, image_meta, [ 677.769033] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 677.769033] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] self._vmops.spawn(context, instance, image_meta, injected_files, [ 677.769033] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 677.769033] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] vm_ref = self.build_virtual_machine(instance, [ 677.769033] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 677.769377] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] vif_infos = vmwarevif.get_vif_info(self._session, [ 677.769377] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 677.769377] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] for vif in network_info: [ 677.769377] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 677.769377] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] return self._sync_wrapper(fn, *args, **kwargs) [ 677.769377] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 677.769377] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] self.wait() [ 677.769377] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 677.769377] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] self[:] = self._gt.wait() [ 677.769377] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 677.769377] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] return self._exit_event.wait() [ 677.769377] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 677.769377] env[62109]: ERROR 
nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] result = hub.switch() [ 677.769749] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 677.769749] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] return self.greenlet.switch() [ 677.769749] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 677.769749] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] result = function(*args, **kwargs) [ 677.769749] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 677.769749] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] return func(*args, **kwargs) [ 677.769749] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 677.769749] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] raise e [ 677.769749] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.769749] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] nwinfo = self.network_api.allocate_for_instance( [ 677.769749] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 677.769749] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] created_port_ids = self._update_ports_for_instance( [ 677.769749] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 677.770135] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] with excutils.save_and_reraise_exception(): [ 677.770135] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.770135] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] self.force_reraise() [ 677.770135] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.770135] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] raise self.value [ 677.770135] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 677.770135] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] updated_port = self._update_port( [ 677.770135] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.770135] 
env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] _ensure_no_port_binding_failure(port) [ 677.770135] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 677.770135] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] raise exception.PortBindingFailed(port_id=port['id']) [ 677.770135] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] nova.exception.PortBindingFailed: Binding failed for port acb8c8a9-4641-44ff-a89f-f7d5f382b821, please check neutron logs for more information. [ 677.770135] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] [ 677.770464] env[62109]: INFO nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Terminating instance [ 677.771763] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "refresh_cache-307cf522-173e-4bd8-8535-e4a6db6aa430" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.771922] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquired lock "refresh_cache-307cf522-173e-4bd8-8535-e4a6db6aa430" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.772095] env[62109]: DEBUG nova.network.neutron [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 677.922647] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52dd9ae1-d75c-d69f-655f-1c6a7bd8c53f, 'name': SearchDatastore_Task, 'duration_secs': 0.03261} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.923095] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.923461] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 677.923814] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.924051] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.924324] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 677.924688] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93a9805b-f40a-41ae-a9ef-ff9aaf583cad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.937522] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 677.937734] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 677.938491] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3904f0e4-407e-45c7-9a01-74e0d5edf237 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.945368] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 677.945368] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5207b198-15af-8c6c-840c-907311e62658" [ 677.945368] env[62109]: _type = "Task" [ 677.945368] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.953373] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5207b198-15af-8c6c-840c-907311e62658, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.006795] env[62109]: DEBUG nova.compute.manager [req-a927d22d-d92c-48da-a276-0b17132949dd req-46086ec8-7c04-476d-b633-c81a8c79e7d5 service nova] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Received event network-changed-acb8c8a9-4641-44ff-a89f-f7d5f382b821 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 678.007076] env[62109]: DEBUG nova.compute.manager [req-a927d22d-d92c-48da-a276-0b17132949dd req-46086ec8-7c04-476d-b633-c81a8c79e7d5 service nova] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Refreshing instance network info cache due to event network-changed-acb8c8a9-4641-44ff-a89f-f7d5f382b821. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 678.007210] env[62109]: DEBUG oslo_concurrency.lockutils [req-a927d22d-d92c-48da-a276-0b17132949dd req-46086ec8-7c04-476d-b633-c81a8c79e7d5 service nova] Acquiring lock "refresh_cache-307cf522-173e-4bd8-8535-e4a6db6aa430" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.041678] env[62109]: DEBUG nova.network.neutron [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 678.122797] env[62109]: DEBUG nova.network.neutron [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.293864] env[62109]: DEBUG nova.network.neutron [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 678.303993] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Acquiring lock "9b2968bb-ed06-4740-b43e-b4aa1fac76dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.304238] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Lock "9b2968bb-ed06-4740-b43e-b4aa1fac76dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.375117] env[62109]: DEBUG nova.network.neutron [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.456694] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5207b198-15af-8c6c-840c-907311e62658, 'name': SearchDatastore_Task, 'duration_secs': 0.010596} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.458038] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7839b260-6d9d-4c62-a92f-67789f51bb1d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.466096] env[62109]: DEBUG oslo_concurrency.lockutils [None req-319a15b7-6a37-4fc4-a6fb-c8c63fe28fe6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "18a86082-f234-44ab-81e1-b215f284385f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.466338] env[62109]: DEBUG oslo_concurrency.lockutils [None req-319a15b7-6a37-4fc4-a6fb-c8c63fe28fe6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "18a86082-f234-44ab-81e1-b215f284385f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.466596] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 678.466596] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522bb062-5b19-de3c-b26b-c044049ba61f" [ 678.466596] env[62109]: _type = "Task" [ 678.466596] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.475408] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522bb062-5b19-de3c-b26b-c044049ba61f, 'name': SearchDatastore_Task, 'duration_secs': 0.008329} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.475629] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.475982] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1/934e3a1b-8d3f-4de0-ae8b-35b82d3859a1.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 678.476126] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65df3993-e42d-4942-9e14-8e0840ceac2d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.482588] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 678.482588] env[62109]: value = "task-1116137" [ 678.482588] env[62109]: _type = "Task" [ 678.482588] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.490152] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116137, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.627178] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "refresh_cache-f5f24014-2196-4c44-b947-a80ac75197de" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.627178] env[62109]: DEBUG nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 678.627178] env[62109]: DEBUG nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 678.627178] env[62109]: DEBUG nova.network.neutron [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 678.644103] env[62109]: DEBUG nova.network.neutron [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 678.877676] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Releasing lock "refresh_cache-307cf522-173e-4bd8-8535-e4a6db6aa430" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.878521] env[62109]: DEBUG nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 678.878521] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 678.881751] env[62109]: DEBUG oslo_concurrency.lockutils [req-a927d22d-d92c-48da-a276-0b17132949dd req-46086ec8-7c04-476d-b633-c81a8c79e7d5 service nova] Acquired lock "refresh_cache-307cf522-173e-4bd8-8535-e4a6db6aa430" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.881982] env[62109]: DEBUG nova.network.neutron [req-a927d22d-d92c-48da-a276-0b17132949dd req-46086ec8-7c04-476d-b633-c81a8c79e7d5 service nova] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Refreshing network info cache for port acb8c8a9-4641-44ff-a89f-f7d5f382b821 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 678.883278] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b404da05-c24c-4652-9aa1-32cdb1b7d806 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.899417] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da67896b-7a18-4429-9746-34293bb57954 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.936559] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 307cf522-173e-4bd8-8535-e4a6db6aa430 could not be found. [ 678.936766] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 678.936829] env[62109]: INFO nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Took 0.06 seconds to destroy the instance on the hypervisor. [ 678.937440] env[62109]: DEBUG oslo.service.loopingcall [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 678.937440] env[62109]: DEBUG nova.compute.manager [-] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 678.937440] env[62109]: DEBUG nova.network.neutron [-] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 678.962821] env[62109]: DEBUG nova.network.neutron [-] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 678.997608] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116137, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465144} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.998062] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1/934e3a1b-8d3f-4de0-ae8b-35b82d3859a1.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 678.998444] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 678.998836] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-042cc9f3-73fc-40d0-807d-451cedc5f6c9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.008419] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 679.008419] env[62109]: value = "task-1116138" [ 679.008419] env[62109]: _type = "Task" [ 679.008419] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.018249] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116138, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.079108] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7648d73-4209-4128-b5f2-84604001449b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.087111] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d504a5-aa5d-4c96-bb7b-cda04034ead3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.119216] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6892b378-137a-48c2-b9fa-4e05e1340064 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.126668] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5ac3b1-ef0e-4f10-bb05-8fb09d5a92f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.139737] env[62109]: DEBUG nova.compute.provider_tree [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.149193] env[62109]: DEBUG nova.network.neutron [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.403838] env[62109]: DEBUG nova.network.neutron [req-a927d22d-d92c-48da-a276-0b17132949dd req-46086ec8-7c04-476d-b633-c81a8c79e7d5 service nova] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 679.466308] env[62109]: DEBUG nova.network.neutron [-] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.486542] env[62109]: DEBUG nova.network.neutron [req-a927d22d-d92c-48da-a276-0b17132949dd req-46086ec8-7c04-476d-b633-c81a8c79e7d5 service nova] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.519243] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116138, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061847} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.519510] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 679.520683] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0b5699-0dd9-4718-a261-92f3bac3e150 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.540224] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1/934e3a1b-8d3f-4de0-ae8b-35b82d3859a1.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 679.540522] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-421f0b3d-3fc0-4ab3-816b-009a3e3e33d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.560947] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 679.560947] env[62109]: value = "task-1116139" [ 679.560947] env[62109]: _type = "Task" [ 679.560947] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.569264] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116139, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.644012] env[62109]: DEBUG nova.scheduler.client.report [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 679.652645] env[62109]: INFO nova.compute.manager [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: f5f24014-2196-4c44-b947-a80ac75197de] Took 1.03 seconds to deallocate network for instance. 
[ 679.895054] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "59f6adc7-d491-4a86-83f7-89128511e00f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.895307] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "59f6adc7-d491-4a86-83f7-89128511e00f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.969239] env[62109]: INFO nova.compute.manager [-] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Took 1.03 seconds to deallocate network for instance. [ 679.971718] env[62109]: DEBUG nova.compute.claims [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 679.971899] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.988609] env[62109]: DEBUG oslo_concurrency.lockutils [req-a927d22d-d92c-48da-a276-0b17132949dd req-46086ec8-7c04-476d-b633-c81a8c79e7d5 service nova] Releasing lock "refresh_cache-307cf522-173e-4bd8-8535-e4a6db6aa430" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.067087] env[62109]: DEBUG nova.compute.manager [req-eaa546b1-066f-43c6-ae25-cc79043e514b req-8ebb0c3d-fe68-4311-9d3b-c2342be48537 service nova] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Received event network-vif-deleted-acb8c8a9-4641-44ff-a89f-f7d5f382b821 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 680.073363] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116139, 'name': ReconfigVM_Task, 'duration_secs': 0.432394} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.073608] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1/934e3a1b-8d3f-4de0-ae8b-35b82d3859a1.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 680.074177] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b23fca34-4ef2-4f94-b739-0aeea91b5b8c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.081231] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 680.081231] env[62109]: value = "task-1116140" [ 680.081231] env[62109]: _type = "Task" [ 680.081231] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.089164] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116140, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.149423] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.634s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.150354] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 680.152542] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.214s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.591126] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116140, 'name': Rename_Task, 'duration_secs': 0.184615} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.591418] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 680.591659] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71e27a22-d666-4f6f-8210-4cc68bc480fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.597857] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 680.597857] env[62109]: value = "task-1116141" [ 680.597857] env[62109]: _type = "Task" [ 680.597857] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.605253] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116141, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.657824] env[62109]: DEBUG nova.compute.utils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 680.659284] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 680.659454] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 680.690373] env[62109]: INFO nova.scheduler.client.report [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Deleted allocations for instance f5f24014-2196-4c44-b947-a80ac75197de [ 680.720781] env[62109]: DEBUG nova.policy [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da41670c89fd4656af23e3a31d661813', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '115ba619f3354bfea923af53d277b691', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 680.977413] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Successfully created port: 515338fe-203e-4a77-8a19-931103ee66b6 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 681.052523] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f090edaf-75bf-44b6-8eec-ed0e2cd9236e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.059686] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2158a21e-c7ed-4d72-8b5c-b2ad3a1f1bd0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.091022] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ffa003-396e-49d7-9f46-705edd20f514 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.096464] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979e45e6-7c6c-48ae-be9b-1e2a2a1ab505 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.114334] env[62109]: DEBUG nova.compute.provider_tree [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.115787] env[62109]: DEBUG oslo_vmware.api [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: 
{'id': task-1116141, 'name': PowerOnVM_Task, 'duration_secs': 0.419918} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.116285] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 681.116490] env[62109]: DEBUG nova.compute.manager [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 681.117236] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5b5d63-9079-4f43-9c8e-6b5d4e261b30 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.165446] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 681.202375] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29270756-7518-4b16-b8f5-448e1a6fd739 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "f5f24014-2196-4c44-b947-a80ac75197de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.716s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.617180] env[62109]: DEBUG nova.scheduler.client.report [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 681.634959] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.705344] env[62109]: DEBUG nova.compute.manager [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 681.813973] env[62109]: ERROR nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 515338fe-203e-4a77-8a19-931103ee66b6, please check neutron logs for more information. [ 681.813973] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 681.813973] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 681.813973] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 681.813973] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 681.813973] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 681.813973] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 681.813973] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 681.813973] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.813973] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 681.813973] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.813973] env[62109]: ERROR nova.compute.manager raise self.value [ 681.813973] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 681.813973] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 681.813973] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.813973] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 681.814633] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.814633] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 681.814633] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 515338fe-203e-4a77-8a19-931103ee66b6, please check neutron logs for more information. 
[ 681.814633] env[62109]: ERROR nova.compute.manager [ 681.814633] env[62109]: Traceback (most recent call last): [ 681.814633] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 681.814633] env[62109]: listener.cb(fileno) [ 681.814633] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 681.814633] env[62109]: result = function(*args, **kwargs) [ 681.814633] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 681.814633] env[62109]: return func(*args, **kwargs) [ 681.814633] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 681.814633] env[62109]: raise e [ 681.814633] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 681.814633] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 681.814633] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 681.814633] env[62109]: created_port_ids = self._update_ports_for_instance( [ 681.814633] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 681.814633] env[62109]: with excutils.save_and_reraise_exception(): [ 681.814633] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.814633] env[62109]: self.force_reraise() [ 681.814633] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.814633] env[62109]: raise self.value [ 681.814633] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 681.814633] env[62109]: updated_port = self._update_port( [ 681.814633] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.814633] env[62109]: _ensure_no_port_binding_failure(port) [ 681.814633] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.814633] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 681.815505] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 515338fe-203e-4a77-8a19-931103ee66b6, please check neutron logs for more information. [ 681.815505] env[62109]: Removing descriptor: 18 [ 682.126958] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.974s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.127406] env[62109]: ERROR nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f663db46-f083-4755-9c3f-34e9e4a7b8b3, please check neutron logs for more information. 
[ 682.127406] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Traceback (most recent call last): [ 682.127406] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 682.127406] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] self.driver.spawn(context, instance, image_meta, [ 682.127406] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 682.127406] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] self._vmops.spawn(context, instance, image_meta, injected_files, [ 682.127406] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 682.127406] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] vm_ref = self.build_virtual_machine(instance, [ 682.127406] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 682.127406] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] vif_infos = vmwarevif.get_vif_info(self._session, [ 682.127406] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 682.127762] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] for vif in network_info: [ 682.127762] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 682.127762] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] return self._sync_wrapper(fn, *args, **kwargs) [ 682.127762] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 682.127762] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] self.wait() [ 682.127762] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 682.127762] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] self[:] = self._gt.wait() [ 682.127762] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 682.127762] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] return self._exit_event.wait() [ 682.127762] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 682.127762] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] current.throw(*self._exc) [ 682.127762] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
682.127762] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] result = function(*args, **kwargs) [ 682.128430] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 682.128430] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] return func(*args, **kwargs) [ 682.128430] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 682.128430] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] raise e [ 682.128430] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 682.128430] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] nwinfo = self.network_api.allocate_for_instance( [ 682.128430] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 682.128430] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] created_port_ids = self._update_ports_for_instance( [ 682.128430] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 682.128430] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] with excutils.save_and_reraise_exception(): [ 682.128430] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 682.128430] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] self.force_reraise() [ 682.128430] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 682.128784] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] raise self.value [ 682.128784] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 682.128784] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] updated_port = self._update_port( [ 682.128784] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 682.128784] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] _ensure_no_port_binding_failure(port) [ 682.128784] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 682.128784] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] raise exception.PortBindingFailed(port_id=port['id']) [ 682.128784] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] nova.exception.PortBindingFailed: Binding failed for 
port f663db46-f083-4755-9c3f-34e9e4a7b8b3, please check neutron logs for more information. [ 682.128784] env[62109]: ERROR nova.compute.manager [instance: 4c02989b-4638-41b8-bccb-f2605c883346] [ 682.128784] env[62109]: DEBUG nova.compute.utils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Binding failed for port f663db46-f083-4755-9c3f-34e9e4a7b8b3, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 682.129951] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.013s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.137158] env[62109]: DEBUG nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Build of instance 4c02989b-4638-41b8-bccb-f2605c883346 was re-scheduled: Binding failed for port f663db46-f083-4755-9c3f-34e9e4a7b8b3, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 682.137158] env[62109]: DEBUG nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 682.137158] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Acquiring lock "refresh_cache-4c02989b-4638-41b8-bccb-f2605c883346" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.137158] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Acquired lock "refresh_cache-4c02989b-4638-41b8-bccb-f2605c883346" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.137354] env[62109]: DEBUG nova.network.neutron [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 682.140368] env[62109]: DEBUG nova.compute.manager [req-5c6894a0-4869-4641-a7d9-94ad045d4359 req-8704d7b7-286f-45ff-bb85-3f7f3b0ad4d8 service nova] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Received event network-changed-515338fe-203e-4a77-8a19-931103ee66b6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 682.140548] env[62109]: DEBUG nova.compute.manager 
[req-5c6894a0-4869-4641-a7d9-94ad045d4359 req-8704d7b7-286f-45ff-bb85-3f7f3b0ad4d8 service nova] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Refreshing instance network info cache due to event network-changed-515338fe-203e-4a77-8a19-931103ee66b6. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 682.140735] env[62109]: DEBUG oslo_concurrency.lockutils [req-5c6894a0-4869-4641-a7d9-94ad045d4359 req-8704d7b7-286f-45ff-bb85-3f7f3b0ad4d8 service nova] Acquiring lock "refresh_cache-02f52fdd-ece0-43a5-b7fd-be4172093698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.140869] env[62109]: DEBUG oslo_concurrency.lockutils [req-5c6894a0-4869-4641-a7d9-94ad045d4359 req-8704d7b7-286f-45ff-bb85-3f7f3b0ad4d8 service nova] Acquired lock "refresh_cache-02f52fdd-ece0-43a5-b7fd-be4172093698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.141032] env[62109]: DEBUG nova.network.neutron [req-5c6894a0-4869-4641-a7d9-94ad045d4359 req-8704d7b7-286f-45ff-bb85-3f7f3b0ad4d8 service nova] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Refreshing network info cache for port 515338fe-203e-4a77-8a19-931103ee66b6 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 682.173482] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 682.201022] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 682.201303] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 682.201478] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 682.201689] env[62109]: DEBUG nova.virt.hardware [None 
req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 682.201842] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 682.201990] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 682.202227] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 682.202410] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 682.202576] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 682.203422] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 682.203422] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 682.203865] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf858d36-e99e-4845-8b8e-c51b9acc0ba9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.215895] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3cf373b-1922-40db-94c3-59f91a0e8947 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.234653] env[62109]: ERROR nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] 
Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 515338fe-203e-4a77-8a19-931103ee66b6, please check neutron logs for more information. [ 682.234653] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Traceback (most recent call last): [ 682.234653] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 682.234653] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] yield resources [ 682.234653] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 682.234653] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] self.driver.spawn(context, instance, image_meta, [ 682.234653] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 682.234653] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] self._vmops.spawn(context, instance, image_meta, injected_files, [ 682.234653] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 682.234653] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] vm_ref = self.build_virtual_machine(instance, [ 682.234653] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 682.235066] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] vif_infos = vmwarevif.get_vif_info(self._session, [ 682.235066] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 682.235066] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] for vif in network_info: [ 682.235066] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 682.235066] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] return self._sync_wrapper(fn, *args, **kwargs) [ 682.235066] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 682.235066] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] self.wait() [ 682.235066] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 682.235066] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] self[:] = self._gt.wait() [ 682.235066] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 682.235066] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] return self._exit_event.wait() [ 682.235066] env[62109]: ERROR nova.compute.manager 
[instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 682.235066] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] current.throw(*self._exc) [ 682.235416] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 682.235416] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] result = function(*args, **kwargs) [ 682.235416] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 682.235416] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] return func(*args, **kwargs) [ 682.235416] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 682.235416] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] raise e [ 682.235416] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 682.235416] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] nwinfo = self.network_api.allocate_for_instance( [ 682.235416] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 682.235416] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] created_port_ids = self._update_ports_for_instance( [ 682.235416] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 682.235416] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] with excutils.save_and_reraise_exception(): [ 682.235416] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 682.235742] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] self.force_reraise() [ 682.235742] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 682.235742] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] raise self.value [ 682.235742] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 682.235742] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] updated_port = self._update_port( [ 682.235742] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 682.235742] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] _ensure_no_port_binding_failure(port) [ 682.235742] env[62109]: 
ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 682.235742] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] raise exception.PortBindingFailed(port_id=port['id']) [ 682.235742] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] nova.exception.PortBindingFailed: Binding failed for port 515338fe-203e-4a77-8a19-931103ee66b6, please check neutron logs for more information. [ 682.235742] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] [ 682.235742] env[62109]: INFO nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Terminating instance [ 682.237132] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "refresh_cache-02f52fdd-ece0-43a5-b7fd-be4172093698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.240234] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.645980] env[62109]: INFO nova.compute.manager [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Rebuilding instance [ 682.667182] env[62109]: DEBUG nova.network.neutron [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 682.692367] env[62109]: DEBUG nova.network.neutron [req-5c6894a0-4869-4641-a7d9-94ad045d4359 req-8704d7b7-286f-45ff-bb85-3f7f3b0ad4d8 service nova] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 682.695323] env[62109]: DEBUG nova.compute.manager [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 682.697363] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60efe63b-e9d3-4753-9859-f1ffec692004 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.768290] env[62109]: DEBUG nova.network.neutron [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.858152] env[62109]: DEBUG nova.network.neutron [req-5c6894a0-4869-4641-a7d9-94ad045d4359 req-8704d7b7-286f-45ff-bb85-3f7f3b0ad4d8 service nova] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.070479] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488f7e2e-c699-459e-a07f-4efcb536cd3d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.078468] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f76003b-9a08-4812-9620-3df3c3a210fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.117883] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c63785-d335-4753-a010-9acbcacabbc4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.125435] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084c6c98-edc8-4948-9fa2-35d7fe4aa5ad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.140499] env[62109]: DEBUG nova.compute.provider_tree [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.213754] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 683.214180] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38cdd416-cf35-4243-9679-80d2c8f926bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.221565] env[62109]: DEBUG 
oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Waiting for the task: (returnval){ [ 683.221565] env[62109]: value = "task-1116142" [ 683.221565] env[62109]: _type = "Task" [ 683.221565] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.230150] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116142, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.270343] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Releasing lock "refresh_cache-4c02989b-4638-41b8-bccb-f2605c883346" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.270608] env[62109]: DEBUG nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 683.270852] env[62109]: DEBUG nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 683.271051] env[62109]: DEBUG nova.network.neutron [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 683.298653] env[62109]: DEBUG nova.network.neutron [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 683.361374] env[62109]: DEBUG oslo_concurrency.lockutils [req-5c6894a0-4869-4641-a7d9-94ad045d4359 req-8704d7b7-286f-45ff-bb85-3f7f3b0ad4d8 service nova] Releasing lock "refresh_cache-02f52fdd-ece0-43a5-b7fd-be4172093698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.361636] env[62109]: DEBUG nova.compute.manager [req-5c6894a0-4869-4641-a7d9-94ad045d4359 req-8704d7b7-286f-45ff-bb85-3f7f3b0ad4d8 service nova] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Received event network-vif-deleted-515338fe-203e-4a77-8a19-931103ee66b6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 683.362014] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquired lock "refresh_cache-02f52fdd-ece0-43a5-b7fd-be4172093698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.362189] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 683.642565] env[62109]: DEBUG nova.scheduler.client.report [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 683.732307] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116142, 'name': PowerOffVM_Task, 'duration_secs': 0.211835} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.732657] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 683.732884] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 683.733978] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945293bd-8ed0-4f23-b0ef-f51427cdd1dd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.740631] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 683.740868] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8327fa6-ea4d-401d-9e83-c3714035fbce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.766732] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 683.766926] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 683.767303] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Deleting the datastore file [datastore2] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 683.767393] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b522fb1-840f-4691-b6f1-580cbbe094b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.773987] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Waiting for the task: (returnval){ [ 683.773987] env[62109]: value = "task-1116144" [ 683.773987] env[62109]: _type = "Task" [ 683.773987] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.781798] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116144, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.800900] env[62109]: DEBUG nova.network.neutron [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.883654] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 683.997071] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.151385] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.021s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.151929] env[62109]: ERROR nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 181c75f9-e3b3-4820-9c7c-f3363ff5d514, please check neutron logs for more information. 
[ 684.151929] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Traceback (most recent call last): [ 684.151929] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 684.151929] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] self.driver.spawn(context, instance, image_meta, [ 684.151929] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 684.151929] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 684.151929] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 684.151929] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] vm_ref = self.build_virtual_machine(instance, [ 684.151929] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 684.151929] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] vif_infos = vmwarevif.get_vif_info(self._session, [ 684.151929] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 684.152305] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] for vif in network_info: [ 684.152305] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 684.152305] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] return self._sync_wrapper(fn, *args, **kwargs) [ 684.152305] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 684.152305] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] self.wait() [ 684.152305] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 684.152305] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] self[:] = self._gt.wait() [ 684.152305] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 684.152305] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] return self._exit_event.wait() [ 684.152305] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 684.152305] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] result = hub.switch() [ 684.152305] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
684.152305] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] return self.greenlet.switch() [ 684.152763] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.152763] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] result = function(*args, **kwargs) [ 684.152763] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 684.152763] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] return func(*args, **kwargs) [ 684.152763] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 684.152763] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] raise e [ 684.152763] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.152763] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] nwinfo = self.network_api.allocate_for_instance( [ 684.152763] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 684.152763] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] created_port_ids = self._update_ports_for_instance( [ 684.152763] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 684.152763] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] with excutils.save_and_reraise_exception(): [ 684.152763] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.153250] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] self.force_reraise() [ 684.153250] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.153250] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] raise self.value [ 684.153250] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 684.153250] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] updated_port = self._update_port( [ 684.153250] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.153250] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] _ensure_no_port_binding_failure(port) [ 684.153250] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 684.153250] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] raise exception.PortBindingFailed(port_id=port['id']) [ 684.153250] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] nova.exception.PortBindingFailed: Binding failed for port 181c75f9-e3b3-4820-9c7c-f3363ff5d514, please check neutron logs for more information. [ 684.153250] env[62109]: ERROR nova.compute.manager [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] [ 684.153572] env[62109]: DEBUG nova.compute.utils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Binding failed for port 181c75f9-e3b3-4820-9c7c-f3363ff5d514, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 684.153870] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.435s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.156041] env[62109]: DEBUG nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Build of instance 7ef14775-9be3-4275-b5ca-dad43aa61dd3 was re-scheduled: Binding failed for port 181c75f9-e3b3-4820-9c7c-f3363ff5d514, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 684.156460] env[62109]: DEBUG nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 684.156682] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Acquiring lock "refresh_cache-7ef14775-9be3-4275-b5ca-dad43aa61dd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.156827] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Acquired lock "refresh_cache-7ef14775-9be3-4275-b5ca-dad43aa61dd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.156982] env[62109]: DEBUG nova.network.neutron [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 684.252837] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock
"7afbb35b-9865-40a7-8b37-d6a661a186a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.253128] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.283228] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116144, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089067} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.283463] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 684.283667] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 684.283859] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 684.303468] env[62109]: INFO nova.compute.manager [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] [instance: 4c02989b-4638-41b8-bccb-f2605c883346] Took 1.03 seconds to deallocate network for instance. [ 684.500056] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Releasing lock "refresh_cache-02f52fdd-ece0-43a5-b7fd-be4172093698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.500505] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Start destroying the instance on the hypervisor.
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 684.500718] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 684.501014] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ffa5830-e8b3-4451-b622-9ebb5e3a6748 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.510526] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e3da65-2a55-41b2-886b-c4423c1388a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.533499] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 02f52fdd-ece0-43a5-b7fd-be4172093698 could not be found. [ 684.533717] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 684.533896] env[62109]: INFO nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Took 0.03 seconds to destroy the instance on the hypervisor. [ 684.534156] env[62109]: DEBUG oslo.service.loopingcall [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 684.534382] env[62109]: DEBUG nova.compute.manager [-] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 684.534474] env[62109]: DEBUG nova.network.neutron [-] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 684.547910] env[62109]: DEBUG nova.network.neutron [-] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 684.675870] env[62109]: DEBUG nova.network.neutron [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 684.749352] env[62109]: DEBUG nova.network.neutron [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.050608] env[62109]: DEBUG nova.network.neutron [-] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.254214] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Releasing lock "refresh_cache-7ef14775-9be3-4275-b5ca-dad43aa61dd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.254489] env[62109]: DEBUG nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 685.254489] env[62109]: DEBUG nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 685.254757] env[62109]: DEBUG nova.network.neutron [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 685.274619] env[62109]: DEBUG nova.network.neutron [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 685.316437] env[62109]: DEBUG nova.virt.hardware [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=<?>,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-03T07:50:17Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 685.316682] env[62109]: DEBUG nova.virt.hardware [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 685.316864] env[62109]: DEBUG nova.virt.hardware [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 685.317084] env[62109]: DEBUG nova.virt.hardware [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 685.317238] env[62109]: DEBUG nova.virt.hardware [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 685.317385] env[62109]: DEBUG nova.virt.hardware [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 685.317586] env[62109]: DEBUG nova.virt.hardware [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 685.317742] env[62109]: DEBUG nova.virt.hardware [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 685.317905] env[62109]: DEBUG nova.virt.hardware [None req-c945bf87-c343-42dd-a245-c3a797268dbd 
tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 685.318083] env[62109]: DEBUG nova.virt.hardware [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 685.318260] env[62109]: DEBUG nova.virt.hardware [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 685.319356] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618bd4d7-a745-4b59-a7fb-b33430c8dad1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.327129] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae408196-904f-40e2-bfb0-6e29ce2d83bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.331643] env[62109]: INFO nova.scheduler.client.report [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Deleted allocations for instance 4c02989b-4638-41b8-bccb-f2605c883346 [ 685.346805] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.352387] env[62109]: DEBUG oslo.service.loopingcall [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 685.353256] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 685.353479] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-478c44b3-c829-460e-88c4-42e8004989db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.371569] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.371569] env[62109]: value = "task-1116145" [ 685.371569] env[62109]: _type = "Task" [ 685.371569] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.379449] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116145, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.553059] env[62109]: INFO nova.compute.manager [-] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Took 1.02 seconds to deallocate network for instance. [ 685.556108] env[62109]: DEBUG nova.compute.claims [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 685.556300] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.686931] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 4c02989b-4638-41b8-bccb-f2605c883346 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 685.687119] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.776848] env[62109]: DEBUG nova.network.neutron [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.839549] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eefe05-f67e-4e89-b53e-6f52683ef837 tempest-ServersTestBootFromVolume-496527689 tempest-ServersTestBootFromVolume-496527689-project-member] Lock "4c02989b-4638-41b8-bccb-f2605c883346" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 105.670s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.884784] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116145, 'name': CreateVM_Task, 'duration_secs': 0.223829} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.884999] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 685.885458] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.885694] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.886060] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 685.886338] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb0efb04-950e-458e-8d66-3d1807a0acf7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.891122] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Waiting for the task: (returnval){ [ 685.891122] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c55915-6446-4150-b696-c580e70b2176" [ 685.891122] env[62109]: _type = "Task" [ 685.891122] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.898753] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c55915-6446-4150-b696-c580e70b2176, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.190541] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 7ef14775-9be3-4275-b5ca-dad43aa61dd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 686.190665] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 17fd1633-f327-47e9-905f-60c8c7446c7e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 686.190713] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 307cf522-173e-4bd8-8535-e4a6db6aa430 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 686.190810] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 02f52fdd-ece0-43a5-b7fd-be4172093698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 686.279519] env[62109]: INFO nova.compute.manager [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] [instance: 7ef14775-9be3-4275-b5ca-dad43aa61dd3] Took 1.02 seconds to deallocate network for instance. [ 686.342162] env[62109]: DEBUG nova.compute.manager [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 686.402187] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c55915-6446-4150-b696-c580e70b2176, 'name': SearchDatastore_Task, 'duration_secs': 0.007848} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.402540] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.402787] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 686.403022] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.403184] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.403340] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 686.403599] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-455ec499-ff3c-4c9a-aeb5-66fbaadfcc7c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.411192] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 686.411367] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 686.412344] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-607586e2-b4fc-4fe5-8767-c8267fce10a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.417314] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Waiting for the task: (returnval){ [ 686.417314] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fe8cb7-215a-23c1-30ca-5601e0892cfe" [ 686.417314] env[62109]: _type = "Task" [ 686.417314] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.427444] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fe8cb7-215a-23c1-30ca-5601e0892cfe, 'name': SearchDatastore_Task} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.427444] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c144638-0fb6-41d7-9e2b-89c8624e3c7d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.431595] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Waiting for the task: (returnval){ [ 686.431595] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524aea38-6ca9-e6f8-d42e-0102fd3bd2f8" [ 686.431595] env[62109]: _type = "Task" [ 686.431595] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.439720] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524aea38-6ca9-e6f8-d42e-0102fd3bd2f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.694634] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 17ee49a9-d980-46c0-996e-6a43c80be434 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 686.865613] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.943494] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524aea38-6ca9-e6f8-d42e-0102fd3bd2f8, 'name': SearchDatastore_Task, 'duration_secs': 0.007501} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.943762] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.944021] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1/934e3a1b-8d3f-4de0-ae8b-35b82d3859a1.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 686.944285] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4620653-6a8b-4eab-80fa-e877dbebea45 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.951089] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Waiting for the task: (returnval){ [ 686.951089] env[62109]: value = "task-1116147" [ 686.951089] env[62109]: _type = "Task" [ 686.951089] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.959013] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116147, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.198674] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.314847] env[62109]: INFO nova.scheduler.client.report [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Deleted allocations for instance 7ef14775-9be3-4275-b5ca-dad43aa61dd3 [ 687.468018] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116147, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.426993} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.468018] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1/934e3a1b-8d3f-4de0-ae8b-35b82d3859a1.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 687.468018] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 687.468018] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a113129-6c50-4dcf-b855-5d45941d9a39 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.475036] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Waiting for the task: (returnval){ [ 687.475036] env[62109]: value = "task-1116148" [ 687.475036] env[62109]: _type = "Task" [ 687.475036] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.482335] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116148, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.702343] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance d727d597-c4ac-426e-bdc3-fc4f73a3eac9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.824209] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf8cd3d-60fa-4b51-b4bc-dc6dca0aa3aa tempest-TenantUsagesTestJSON-1167164114 tempest-TenantUsagesTestJSON-1167164114-project-member] Lock "7ef14775-9be3-4275-b5ca-dad43aa61dd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 99.705s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.985145] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116148, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062856} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.985475] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 687.987478] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4bbc60-3ac3-4520-b7a5-d0dc6a63b7bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.007537] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1/934e3a1b-8d3f-4de0-ae8b-35b82d3859a1.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 688.007824] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b7b9f18-49c4-4609-96d5-8d4ec5015ecf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.027887] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Waiting for the task: (returnval){ [ 688.027887] env[62109]: value = "task-1116149" [ 688.027887] env[62109]: _type = "Task" [ 688.027887] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.037517] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116149, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.207023] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 688.328251] env[62109]: DEBUG nova.compute.manager [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 688.538553] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116149, 'name': ReconfigVM_Task, 'duration_secs': 0.251647} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.538930] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1/934e3a1b-8d3f-4de0-ae8b-35b82d3859a1.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 688.539890] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1295127c-e77c-4425-a33c-4773d4889504 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.545996] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Waiting for the task: (returnval){ [ 688.545996] env[62109]: value = "task-1116150" [ 688.545996] env[62109]: _type = "Task" [ 688.545996] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.554371] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116150, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.711864] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance f6d3a50c-bcc3-4a6f-969f-4e629646f427 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 688.860101] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.056342] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116150, 'name': Rename_Task, 'duration_secs': 0.128765} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.056486] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 689.057104] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-21589aaf-1761-4a9e-ad06-4c63a8f2004e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.062916] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Waiting for the task: (returnval){ [ 689.062916] env[62109]: value = "task-1116151" [ 689.062916] env[62109]: _type = "Task" [ 689.062916] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.072602] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116151, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.216580] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 689.573697] env[62109]: DEBUG oslo_vmware.api [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Task: {'id': task-1116151, 'name': PowerOnVM_Task, 'duration_secs': 0.42261} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.573989] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 689.574208] env[62109]: DEBUG nova.compute.manager [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 689.574993] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff87bee6-ce25-476c-be7c-e632435a9e90 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.720937] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance c753a2db-d701-4508-88bd-4ebe4f32a075 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 690.091396] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.233862] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 1399f618-3a93-4731-a59b-f98306d6cd52 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 690.389256] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquiring lock "934e3a1b-8d3f-4de0-ae8b-35b82d3859a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.389256] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Lock "934e3a1b-8d3f-4de0-ae8b-35b82d3859a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.389256] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquiring lock "934e3a1b-8d3f-4de0-ae8b-35b82d3859a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.389256] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Lock "934e3a1b-8d3f-4de0-ae8b-35b82d3859a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.389452] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Lock "934e3a1b-8d3f-4de0-ae8b-35b82d3859a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.390769] env[62109]: INFO nova.compute.manager [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Terminating instance [ 690.393428] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquiring lock "refresh_cache-934e3a1b-8d3f-4de0-ae8b-35b82d3859a1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.393585] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquired lock "refresh_cache-934e3a1b-8d3f-4de0-ae8b-35b82d3859a1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.393751] env[62109]: DEBUG nova.network.neutron [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 
tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 690.742576] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 55381bef-dab5-44cd-97fe-9fc75ab61d0e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 690.919637] env[62109]: DEBUG nova.network.neutron [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 691.006238] env[62109]: DEBUG nova.network.neutron [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.246021] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance a9fb75d5-e303-4f31-888d-528963ab23b7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.510432] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Releasing lock "refresh_cache-934e3a1b-8d3f-4de0-ae8b-35b82d3859a1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.510856] env[62109]: DEBUG nova.compute.manager [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 691.511064] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 691.511954] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02b7d76-2cf4-412e-ab78-2b3f8f2f1fee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.521542] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 691.521542] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de96a52f-af73-4be9-8a31-22a8e6d890ca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.527130] env[62109]: DEBUG oslo_vmware.api [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 691.527130] env[62109]: value = "task-1116152" [ 691.527130] env[62109]: _type = "Task" [ 691.527130] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.537350] env[62109]: DEBUG oslo_vmware.api [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116152, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.751502] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 028300fd-f9f8-4606-a39e-53582f830eeb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 692.037846] env[62109]: DEBUG oslo_vmware.api [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116152, 'name': PowerOffVM_Task, 'duration_secs': 0.12715} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.038188] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 692.038387] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 692.039252] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72f8f353-aab8-48cd-89b2-dabbb489eefa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.061959] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 692.062196] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 692.062378] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Deleting the datastore file [datastore2] 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 692.062633] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8075eb1b-10d8-4ed7-8eb8-0d81600ce22b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.069120] env[62109]: DEBUG oslo_vmware.api [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for the task: (returnval){ [ 692.069120] env[62109]: value = "task-1116154" [ 692.069120] env[62109]: _type = "Task" [ 692.069120] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.078280] env[62109]: DEBUG oslo_vmware.api [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116154, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.254266] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 1aaa9eae-9183-49d7-a452-4345ad2a9aa0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 692.579311] env[62109]: DEBUG oslo_vmware.api [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Task: {'id': task-1116154, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10556} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.579647] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 692.579876] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 692.580129] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 692.580367] env[62109]: INFO nova.compute.manager [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Took 1.07 seconds to destroy the instance on the hypervisor. [ 692.580662] env[62109]: DEBUG oslo.service.loopingcall [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 692.580917] env[62109]: DEBUG nova.compute.manager [-] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 692.581074] env[62109]: DEBUG nova.network.neutron [-] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 692.597482] env[62109]: DEBUG nova.network.neutron [-] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 692.757481] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 8584eb2c-57a3-455e-9d3c-877286e23ccc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 692.926389] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Acquiring lock "2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.927087] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Lock "2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.099651] env[62109]: DEBUG nova.network.neutron [-] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.263029] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 8b6ec904-8c68-4eaa-94fe-47a87528e26b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 693.603188] env[62109]: INFO nova.compute.manager [-] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Took 1.02 seconds to deallocate network for instance. [ 693.765341] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 694.109317] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.268030] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance c90ace77-5b8b-4b04-aa57-d47ad17df01e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 694.771427] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance c44d618e-c781-47ba-b191-cecc01dcfe9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 695.275028] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 7f40cdc8-3421-47b7-b148-ff6417105dbb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 695.778070] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 8b63f9a1-5639-48b2-b0a9-30380835bef2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 696.280617] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 32cccd30-278c-48b6-8855-5cd76c2da057 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 696.783620] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance a24f2349-7c1b-441d-a36e-b16dd61f6031 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 697.287176] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 697.790866] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance c5c63ece-611d-45d1-a8e6-9327700f1563 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.294438] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 9b2968bb-ed06-4740-b43e-b4aa1fac76dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.798558] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 59f6adc7-d491-4a86-83f7-89128511e00f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.301951] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 7afbb35b-9865-40a7-8b37-d6a661a186a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.301951] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 699.301951] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 699.634058] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0d587c-f98e-432b-b0e0-eeb51298bca9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.641773] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f7bca1-0112-4c9c-8401-79e9c0e13be3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.671605] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a565d0ab-1cc4-41e4-b065-4107b1ee6cbe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.678506] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c9d3bd-ecbd-4172-9804-a7926e4de1ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.690938] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed in ProviderTree for provider: 
574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.193718] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 700.698940] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 700.698940] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 16.545s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.699166] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.923s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.700653] env[62109]: INFO nova.compute.claims [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 702.031478] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9127070-9d74-49db-8bd3-dd264dbd8922 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.039493] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfdd1e37-cfbc-49ab-97ca-f79c98507d09 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.070621] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046337ae-b39b-484f-ba71-7850dd4e82a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.078344] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a4c36a-5000-4da7-8678-361265891675 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.091882] env[62109]: DEBUG nova.compute.provider_tree [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Inventory has not changed in ProviderTree 
for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.595057] env[62109]: DEBUG nova.scheduler.client.report [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 703.099886] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.100446] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 703.103068] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.175s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.610282] env[62109]: DEBUG nova.compute.utils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 703.611759] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 703.611923] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 703.675950] env[62109]: DEBUG nova.policy [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da41670c89fd4656af23e3a31d661813', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '115ba619f3354bfea923af53d277b691', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 703.964745] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d23f937-3adf-4d28-a768-8937f2ae2e68 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.971890] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Successfully created port: 6f0fba83-886e-4b4d-80b4-14d23a8d48f3 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 703.974269] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83aa2f95-1592-45b9-9c49-ac70e2b56ab9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.004302] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ef83f1-a10c-4372-a815-38d53248a9e5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.011428] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f46d91f-7b2b-4e36-800b-16abccb2641c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.024253] env[62109]: DEBUG nova.compute.provider_tree [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.117722] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 704.528085] env[62109]: DEBUG nova.scheduler.client.report [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 705.032428] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.929s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.033110] env[62109]: ERROR nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c1a7f1e1-bc39-4b71-a227-069264e706de, please check neutron logs for more information. [ 705.033110] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Traceback (most recent call last): [ 705.033110] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 705.033110] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] self.driver.spawn(context, instance, image_meta, [ 705.033110] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 705.033110] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 705.033110] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 705.033110] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] vm_ref = self.build_virtual_machine(instance, [ 705.033110] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 705.033110] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] vif_infos = vmwarevif.get_vif_info(self._session, [ 705.033110] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 705.033468] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] for vif in network_info: [ 705.033468] 
env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 705.033468] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] return self._sync_wrapper(fn, *args, **kwargs) [ 705.033468] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 705.033468] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] self.wait() [ 705.033468] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 705.033468] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] self[:] = self._gt.wait() [ 705.033468] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 705.033468] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] return self._exit_event.wait() [ 705.033468] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 705.033468] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] current.throw(*self._exc) [ 705.033468] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 705.033468] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] result = function(*args, **kwargs) [ 705.033852] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 705.033852] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] return func(*args, **kwargs) [ 705.033852] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 705.033852] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] raise e [ 705.033852] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 705.033852] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] nwinfo = self.network_api.allocate_for_instance( [ 705.033852] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 705.033852] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] created_port_ids = self._update_ports_for_instance( [ 705.033852] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 705.033852] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] with excutils.save_and_reraise_exception(): [ 705.033852] env[62109]: ERROR nova.compute.manager 
[instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 705.033852] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] self.force_reraise() [ 705.033852] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 705.034239] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] raise self.value [ 705.034239] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 705.034239] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] updated_port = self._update_port( [ 705.034239] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 705.034239] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] _ensure_no_port_binding_failure(port) [ 705.034239] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 705.034239] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] raise exception.PortBindingFailed(port_id=port['id']) [ 705.034239] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] nova.exception.PortBindingFailed: Binding failed for port c1a7f1e1-bc39-4b71-a227-069264e706de, please check neutron logs for more information. [ 705.034239] env[62109]: ERROR nova.compute.manager [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] [ 705.034239] env[62109]: DEBUG nova.compute.utils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Binding failed for port c1a7f1e1-bc39-4b71-a227-069264e706de, please check neutron logs for more information. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 705.035474] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.054s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.036919] env[62109]: INFO nova.compute.claims [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 705.040547] env[62109]: DEBUG nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Build of instance 17fd1633-f327-47e9-905f-60c8c7446c7e was re-scheduled: Binding failed for port c1a7f1e1-bc39-4b71-a227-069264e706de, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 705.040945] env[62109]: DEBUG nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 705.041183] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Acquiring lock "refresh_cache-17fd1633-f327-47e9-905f-60c8c7446c7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.041331] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Acquired lock "refresh_cache-17fd1633-f327-47e9-905f-60c8c7446c7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.041490] env[62109]: DEBUG nova.network.neutron [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 705.129605] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 705.154423] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 705.154724] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 705.154951] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 705.155161] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 705.155314] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 705.155464] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 705.155670] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 705.155932] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 705.156200] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 705.156392] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 705.156570] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 705.158151] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b92ba1-32d3-4e18-b17f-e37c82c8b036 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.166504] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b722ec16-4221-4811-9618-9230ba3cc68c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.559235] env[62109]: DEBUG nova.network.neutron [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.632809] env[62109]: DEBUG nova.network.neutron [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.871764] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Successfully updated port: 6f0fba83-886e-4b4d-80b4-14d23a8d48f3 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 705.928654] env[62109]: DEBUG nova.compute.manager [req-109b3fd7-b486-4463-af2e-9bec3d3a2807 req-4f10b445-8a81-4a71-8d4b-1a71ce0d94f5 service nova] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Received event network-vif-plugged-6f0fba83-886e-4b4d-80b4-14d23a8d48f3 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 705.928857] env[62109]: DEBUG oslo_concurrency.lockutils [req-109b3fd7-b486-4463-af2e-9bec3d3a2807 req-4f10b445-8a81-4a71-8d4b-1a71ce0d94f5 service nova] Acquiring lock "17ee49a9-d980-46c0-996e-6a43c80be434-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.929075] env[62109]: DEBUG oslo_concurrency.lockutils [req-109b3fd7-b486-4463-af2e-9bec3d3a2807 req-4f10b445-8a81-4a71-8d4b-1a71ce0d94f5 service nova] Lock "17ee49a9-d980-46c0-996e-6a43c80be434-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.929245] env[62109]: DEBUG oslo_concurrency.lockutils [req-109b3fd7-b486-4463-af2e-9bec3d3a2807 req-4f10b445-8a81-4a71-8d4b-1a71ce0d94f5 service nova] Lock "17ee49a9-d980-46c0-996e-6a43c80be434-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.929410] env[62109]: DEBUG nova.compute.manager [req-109b3fd7-b486-4463-af2e-9bec3d3a2807 req-4f10b445-8a81-4a71-8d4b-1a71ce0d94f5 service nova] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] No waiting events found dispatching network-vif-plugged-6f0fba83-886e-4b4d-80b4-14d23a8d48f3 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 705.929583] env[62109]: WARNING nova.compute.manager [req-109b3fd7-b486-4463-af2e-9bec3d3a2807 req-4f10b445-8a81-4a71-8d4b-1a71ce0d94f5 service nova] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Received unexpected event network-vif-plugged-6f0fba83-886e-4b4d-80b4-14d23a8d48f3 for instance with vm_state building and task_state spawning. 
[ 706.134897] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Releasing lock "refresh_cache-17fd1633-f327-47e9-905f-60c8c7446c7e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.135158] env[62109]: DEBUG nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 706.135352] env[62109]: DEBUG nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 706.135534] env[62109]: DEBUG nova.network.neutron [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 706.151460] env[62109]: DEBUG nova.network.neutron [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.373346] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "refresh_cache-17ee49a9-d980-46c0-996e-6a43c80be434" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.373496] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquired lock "refresh_cache-17ee49a9-d980-46c0-996e-6a43c80be434" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.373645] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 706.418128] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4cd7789-f207-45bc-be03-eb227463aa3d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.425713] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099c7a27-5837-4454-a223-e1a626f32553 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.457287] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f83f39-ec97-47ad-bc54-147b9dacaf40 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.464327] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa4d071-21e7-4e0b-ab92-26815093d8a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.477181] env[62109]: DEBUG nova.compute.provider_tree [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.654975] env[62109]: DEBUG nova.network.neutron [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.924235] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.979954] env[62109]: DEBUG nova.scheduler.client.report [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 707.157302] env[62109]: INFO nova.compute.manager [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] [instance: 17fd1633-f327-47e9-905f-60c8c7446c7e] Took 1.02 seconds to deallocate network for instance. [ 707.160370] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Updating instance_info_cache with network_info: [{"id": "6f0fba83-886e-4b4d-80b4-14d23a8d48f3", "address": "fa:16:3e:de:bc:be", "network": {"id": "cb6862a6-f029-4a95-89ed-ef3cf74f038a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1323521961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "115ba619f3354bfea923af53d277b691", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f0fba83-88", "ovs_interfaceid": "6f0fba83-886e-4b4d-80b4-14d23a8d48f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.485329] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.450s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.486062] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 707.488382] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.884s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.490130] env[62109]: INFO nova.compute.claims [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.665638] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Releasing lock "refresh_cache-17ee49a9-d980-46c0-996e-6a43c80be434" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.665917] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Instance network_info: |[{"id": "6f0fba83-886e-4b4d-80b4-14d23a8d48f3", "address": "fa:16:3e:de:bc:be", "network": {"id": "cb6862a6-f029-4a95-89ed-ef3cf74f038a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1323521961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "115ba619f3354bfea923af53d277b691", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f0fba83-88", "ovs_interfaceid": "6f0fba83-886e-4b4d-80b4-14d23a8d48f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 707.666804] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:bc:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4cb37d4-2060-48b6-9e60-156a71fc7ee3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6f0fba83-886e-4b4d-80b4-14d23a8d48f3', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 707.674392] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 
tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Creating folder: Project (115ba619f3354bfea923af53d277b691). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 707.674831] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d70e09bc-47a3-4fa5-9188-02c32f5c7f5a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.686013] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Created folder: Project (115ba619f3354bfea923af53d277b691) in parent group-v244329. [ 707.686199] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Creating folder: Instances. Parent ref: group-v244346. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 707.686414] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f19543b-4c7a-48e0-b1c0-c90dab5fc8d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.695164] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Created folder: Instances in parent group-v244346. [ 707.695376] env[62109]: DEBUG oslo.service.loopingcall [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 707.695882] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 707.696750] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92073838-2ba7-4ed8-b113-34ef20c1e818 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.714849] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 707.714849] env[62109]: value = "task-1116157" [ 707.714849] env[62109]: _type = "Task" [ 707.714849] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.722017] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116157, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.995086] env[62109]: DEBUG nova.compute.utils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 707.999307] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 707.999527] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 708.003884] env[62109]: DEBUG nova.compute.manager [req-0dad159e-4741-4425-bbb2-5811d4c3e10d req-ef47304a-d54a-4dd7-b6e1-987ee55059c4 service nova] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Received event network-changed-6f0fba83-886e-4b4d-80b4-14d23a8d48f3 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 708.003884] env[62109]: DEBUG nova.compute.manager [req-0dad159e-4741-4425-bbb2-5811d4c3e10d req-ef47304a-d54a-4dd7-b6e1-987ee55059c4 service nova] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Refreshing instance network info cache due to event network-changed-6f0fba83-886e-4b4d-80b4-14d23a8d48f3. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 708.004981] env[62109]: DEBUG oslo_concurrency.lockutils [req-0dad159e-4741-4425-bbb2-5811d4c3e10d req-ef47304a-d54a-4dd7-b6e1-987ee55059c4 service nova] Acquiring lock "refresh_cache-17ee49a9-d980-46c0-996e-6a43c80be434" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.004981] env[62109]: DEBUG oslo_concurrency.lockutils [req-0dad159e-4741-4425-bbb2-5811d4c3e10d req-ef47304a-d54a-4dd7-b6e1-987ee55059c4 service nova] Acquired lock "refresh_cache-17ee49a9-d980-46c0-996e-6a43c80be434" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.004981] env[62109]: DEBUG nova.network.neutron [req-0dad159e-4741-4425-bbb2-5811d4c3e10d req-ef47304a-d54a-4dd7-b6e1-987ee55059c4 service nova] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Refreshing network info cache for port 6f0fba83-886e-4b4d-80b4-14d23a8d48f3 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 708.064616] env[62109]: DEBUG nova.policy [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da41670c89fd4656af23e3a31d661813', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '115ba619f3354bfea923af53d277b691', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 708.216441] env[62109]: INFO nova.scheduler.client.report [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Deleted allocations for instance 17fd1633-f327-47e9-905f-60c8c7446c7e [ 708.231414] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116157, 'name': CreateVM_Task, 'duration_secs': 0.323831} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.231580] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 708.240984] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.241178] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.241488] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 708.241976] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cd92e68-94f6-425d-a411-fb5946be31f1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.246841] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 708.246841] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5205ec27-1912-ddae-b838-8a7290b0cd54" [ 708.246841] env[62109]: _type = "Task" [ 708.246841] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.255235] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5205ec27-1912-ddae-b838-8a7290b0cd54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.399349] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Successfully created port: 5ddcdbcd-b248-4185-acdb-ef37c8631a5c {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 708.500839] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 708.727274] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6a5d80e-c528-4764-8002-05a1ea6557bf tempest-FloatingIPsAssociationTestJSON-1898921049 tempest-FloatingIPsAssociationTestJSON-1898921049-project-member] Lock "17fd1633-f327-47e9-905f-60c8c7446c7e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 118.122s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.757304] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5205ec27-1912-ddae-b838-8a7290b0cd54, 'name': SearchDatastore_Task, 'duration_secs': 0.01019} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.759737] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.759895] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 708.760144] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.760289] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.760464] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.760904] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b971b7e-a0be-44ba-84b6-0be2d55ebb7e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.768703] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 
tempest-ListServersNegativeTestJSON-364914635-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.768878] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 708.769578] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e57b4ce9-b467-479d-aefb-54bbcb898a25 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.776874] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 708.776874] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f96196-47dd-6751-42fe-99a9605dbd36" [ 708.776874] env[62109]: _type = "Task" [ 708.776874] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.784592] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f96196-47dd-6751-42fe-99a9605dbd36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.820744] env[62109]: DEBUG nova.network.neutron [req-0dad159e-4741-4425-bbb2-5811d4c3e10d req-ef47304a-d54a-4dd7-b6e1-987ee55059c4 service nova] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Updated VIF entry in instance network info cache for port 6f0fba83-886e-4b4d-80b4-14d23a8d48f3. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 708.821093] env[62109]: DEBUG nova.network.neutron [req-0dad159e-4741-4425-bbb2-5811d4c3e10d req-ef47304a-d54a-4dd7-b6e1-987ee55059c4 service nova] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Updating instance_info_cache with network_info: [{"id": "6f0fba83-886e-4b4d-80b4-14d23a8d48f3", "address": "fa:16:3e:de:bc:be", "network": {"id": "cb6862a6-f029-4a95-89ed-ef3cf74f038a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1323521961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "115ba619f3354bfea923af53d277b691", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f0fba83-88", "ovs_interfaceid": "6f0fba83-886e-4b4d-80b4-14d23a8d48f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.907237] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb51160a-be83-426e-9406-6ebbd7846e31 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.914551] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a14e99-e7ae-4ba8-a390-3ea1ee839c30 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.944092] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd6bf9d-4e6b-4379-b40d-a278cd624e53 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.951267] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248fbd58-e4ff-4a23-8396-96687bcd4300 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.964758] env[62109]: DEBUG nova.compute.provider_tree [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.233248] env[62109]: DEBUG nova.compute.manager [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 709.287396] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f96196-47dd-6751-42fe-99a9605dbd36, 'name': SearchDatastore_Task, 'duration_secs': 0.008258} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.288027] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37fb1243-8e39-4806-98a9-385fe9bd5bef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.293230] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 709.293230] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5256f06e-001e-2e95-7166-f626189cb236" [ 709.293230] env[62109]: _type = "Task" [ 709.293230] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.300523] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5256f06e-001e-2e95-7166-f626189cb236, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.323585] env[62109]: DEBUG oslo_concurrency.lockutils [req-0dad159e-4741-4425-bbb2-5811d4c3e10d req-ef47304a-d54a-4dd7-b6e1-987ee55059c4 service nova] Releasing lock "refresh_cache-17ee49a9-d980-46c0-996e-6a43c80be434" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.468551] env[62109]: DEBUG nova.scheduler.client.report [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 709.509736] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 709.537016] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 709.537016] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 709.537016] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 709.537535] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 709.537808] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 709.538496] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 709.538496] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 709.538626] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 709.539634] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 709.539634] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 709.539634] env[62109]: DEBUG nova.virt.hardware [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 709.539822] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a40c2e5-4660-4203-b4de-3f2ea2b7d608 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.548306] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5885bc94-5d24-46a7-9915-1979fc7f275b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.759957] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.804598] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5256f06e-001e-2e95-7166-f626189cb236, 'name': SearchDatastore_Task, 'duration_secs': 0.009491} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.804854] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.805188] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 17ee49a9-d980-46c0-996e-6a43c80be434/17ee49a9-d980-46c0-996e-6a43c80be434.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 709.805855] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b90232d4-040a-4e75-bb50-d555b25977db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.812130] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 709.812130] env[62109]: value = "task-1116158" [ 709.812130] env[62109]: _type = "Task" [ 709.812130] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.819624] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116158, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.975365] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.485s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.975365] env[62109]: DEBUG nova.compute.manager [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 709.983181] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.398s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.984833] env[62109]: INFO nova.compute.claims [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 710.025732] env[62109]: DEBUG nova.compute.manager [req-15db5e31-5696-4bb1-af47-99ef688ae377 req-7d7641b2-0f34-4029-8b2d-a88036e38d61 service nova] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Received event network-vif-plugged-5ddcdbcd-b248-4185-acdb-ef37c8631a5c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 710.025973] env[62109]: DEBUG oslo_concurrency.lockutils [req-15db5e31-5696-4bb1-af47-99ef688ae377 req-7d7641b2-0f34-4029-8b2d-a88036e38d61 service nova] Acquiring lock "46aa78cc-ea0a-4c1b-aadb-f2a4856c9371-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.026275] env[62109]: DEBUG oslo_concurrency.lockutils [req-15db5e31-5696-4bb1-af47-99ef688ae377 req-7d7641b2-0f34-4029-8b2d-a88036e38d61 service nova] Lock "46aa78cc-ea0a-4c1b-aadb-f2a4856c9371-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.026397] env[62109]: DEBUG oslo_concurrency.lockutils [req-15db5e31-5696-4bb1-af47-99ef688ae377 req-7d7641b2-0f34-4029-8b2d-a88036e38d61 service nova] Lock "46aa78cc-ea0a-4c1b-aadb-f2a4856c9371-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.026624] env[62109]: DEBUG nova.compute.manager [req-15db5e31-5696-4bb1-af47-99ef688ae377 req-7d7641b2-0f34-4029-8b2d-a88036e38d61 service nova] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] No waiting events found dispatching network-vif-plugged-5ddcdbcd-b248-4185-acdb-ef37c8631a5c {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 710.026798] env[62109]: WARNING nova.compute.manager [req-15db5e31-5696-4bb1-af47-99ef688ae377 req-7d7641b2-0f34-4029-8b2d-a88036e38d61 service nova] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Received unexpected event network-vif-plugged-5ddcdbcd-b248-4185-acdb-ef37c8631a5c for instance with vm_state building and task_state spawning. 
[ 710.111302] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Successfully updated port: 5ddcdbcd-b248-4185-acdb-ef37c8631a5c {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 710.322576] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116158, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459483} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.322875] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 17ee49a9-d980-46c0-996e-6a43c80be434/17ee49a9-d980-46c0-996e-6a43c80be434.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 710.324220] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 710.324220] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-61c8228a-934c-4bbd-bd00-9a1b1cb244fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.331722] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 710.331722] env[62109]: value = "task-1116159" [ 710.331722] env[62109]: _type = "Task" [ 710.331722] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.341424] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116159, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.493813] env[62109]: DEBUG nova.compute.utils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 710.493813] env[62109]: DEBUG nova.compute.manager [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 710.493813] env[62109]: DEBUG nova.network.neutron [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 710.586202] env[62109]: DEBUG nova.policy [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e608055854844801b9f7c51d07820917', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ca12aa68e4b4d4d8cf1e3332deb44f4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 710.618611] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "refresh_cache-46aa78cc-ea0a-4c1b-aadb-f2a4856c9371" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.618611] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquired lock "refresh_cache-46aa78cc-ea0a-4c1b-aadb-f2a4856c9371" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.618611] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 710.843893] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116159, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107121} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.845410] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.847505] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96a2e07-b362-45cb-9871-fd9ad43526ea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.876656] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 17ee49a9-d980-46c0-996e-6a43c80be434/17ee49a9-d980-46c0-996e-6a43c80be434.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.877645] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d26ac59-c6fb-4678-aea9-58e6b408efc8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.898022] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 710.898022] env[62109]: value = "task-1116160" [ 710.898022] env[62109]: _type = "Task" [ 710.898022] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.907880] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116160, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.997223] env[62109]: DEBUG nova.compute.manager [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 711.000690] env[62109]: DEBUG nova.network.neutron [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Successfully created port: 05de4996-e885-4ff3-9685-950d1d793e43 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 711.162107] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 711.361954] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Updating instance_info_cache with network_info: [{"id": "5ddcdbcd-b248-4185-acdb-ef37c8631a5c", "address": "fa:16:3e:35:c2:b4", "network": {"id": "cb6862a6-f029-4a95-89ed-ef3cf74f038a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1323521961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "115ba619f3354bfea923af53d277b691", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ddcdbcd-b2", "ovs_interfaceid": "5ddcdbcd-b248-4185-acdb-ef37c8631a5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.412382] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116160, 'name': ReconfigVM_Task, 'duration_secs': 0.336463} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.412685] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 17ee49a9-d980-46c0-996e-6a43c80be434/17ee49a9-d980-46c0-996e-6a43c80be434.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 711.413767] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8c49138-adb5-4142-ad0a-f9787e6a4da4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.424821] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 711.424821] env[62109]: value = "task-1116161" [ 711.424821] env[62109]: _type = "Task" [ 711.424821] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.432076] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116161, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.482655] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96178e3-74f5-4427-9543-a87d92b0a064 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.490293] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d2b37f-e5d2-4de2-a4b6-2bdb32602c09 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.535293] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40aaced-6e52-40a5-8d72-3adb865edeab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.540232] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12966b49-0063-483d-9794-e7f8438b5f6a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.553897] env[62109]: DEBUG nova.compute.provider_tree [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 711.865223] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Releasing lock "refresh_cache-46aa78cc-ea0a-4c1b-aadb-f2a4856c9371" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.865558] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Instance network_info: |[{"id": "5ddcdbcd-b248-4185-acdb-ef37c8631a5c", "address": "fa:16:3e:35:c2:b4", "network": {"id": "cb6862a6-f029-4a95-89ed-ef3cf74f038a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1323521961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "115ba619f3354bfea923af53d277b691", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap5ddcdbcd-b2", "ovs_interfaceid": "5ddcdbcd-b248-4185-acdb-ef37c8631a5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 711.865977] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:c2:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4cb37d4-2060-48b6-9e60-156a71fc7ee3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ddcdbcd-b248-4185-acdb-ef37c8631a5c', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 711.874033] env[62109]: DEBUG oslo.service.loopingcall [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 711.874033] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 711.874033] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a31ad4b3-2b1a-4823-a8b5-c3447d2c676d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.894196] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 711.894196] env[62109]: value = "task-1116162" [ 711.894196] env[62109]: _type = "Task" [ 711.894196] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.901659] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116162, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.933741] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116161, 'name': Rename_Task, 'duration_secs': 0.172284} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.934211] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 711.934554] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb7ad5c6-73d1-4b7c-94f2-dd578e909abf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.940716] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 711.940716] env[62109]: value = "task-1116163" [ 711.940716] env[62109]: _type = "Task" [ 711.940716] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.950372] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116163, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.037014] env[62109]: DEBUG nova.compute.manager [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 712.057288] env[62109]: DEBUG nova.scheduler.client.report [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 712.061105] env[62109]: DEBUG nova.compute.manager [req-66b48d6d-9de5-42f2-a390-6f2ee18ad02c req-4aaf8c4c-0b8b-494b-ba89-d0925e52f329 service nova] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Received event network-changed-5ddcdbcd-b248-4185-acdb-ef37c8631a5c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 712.061339] env[62109]: DEBUG nova.compute.manager [req-66b48d6d-9de5-42f2-a390-6f2ee18ad02c req-4aaf8c4c-0b8b-494b-ba89-d0925e52f329 service nova] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Refreshing instance network info cache due to event network-changed-5ddcdbcd-b248-4185-acdb-ef37c8631a5c. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 712.061507] env[62109]: DEBUG oslo_concurrency.lockutils [req-66b48d6d-9de5-42f2-a390-6f2ee18ad02c req-4aaf8c4c-0b8b-494b-ba89-d0925e52f329 service nova] Acquiring lock "refresh_cache-46aa78cc-ea0a-4c1b-aadb-f2a4856c9371" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.061644] env[62109]: DEBUG oslo_concurrency.lockutils [req-66b48d6d-9de5-42f2-a390-6f2ee18ad02c req-4aaf8c4c-0b8b-494b-ba89-d0925e52f329 service nova] Acquired lock "refresh_cache-46aa78cc-ea0a-4c1b-aadb-f2a4856c9371" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.061801] env[62109]: DEBUG nova.network.neutron [req-66b48d6d-9de5-42f2-a390-6f2ee18ad02c req-4aaf8c4c-0b8b-494b-ba89-d0925e52f329 service nova] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Refreshing network info cache for port 5ddcdbcd-b248-4185-acdb-ef37c8631a5c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 712.070580] env[62109]: DEBUG nova.virt.hardware [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 712.070809] env[62109]: DEBUG nova.virt.hardware [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 712.070963] env[62109]: DEBUG nova.virt.hardware [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 712.071152] env[62109]: DEBUG nova.virt.hardware [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 712.071295] env[62109]: DEBUG nova.virt.hardware [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 712.071439] env[62109]: DEBUG nova.virt.hardware [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 
tempest-ServerDiskConfigTestJSON-842321983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 712.071643] env[62109]: DEBUG nova.virt.hardware [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 712.071800] env[62109]: DEBUG nova.virt.hardware [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 712.071963] env[62109]: DEBUG nova.virt.hardware [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 712.072145] env[62109]: DEBUG nova.virt.hardware [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 712.072317] env[62109]: DEBUG nova.virt.hardware [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 712.073421] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38222c0-bb66-4f9f-b39f-c8549428325c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.082486] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2e7115-a4ec-4c20-9a52-8cc411e76d15 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.403966] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116162, 'name': CreateVM_Task, 'duration_secs': 0.342336} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.404221] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 712.404883] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.405843] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.405843] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 712.405843] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4d5e326-4772-456a-aec0-582e5effa571 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.410190] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 712.410190] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f96c29-ff43-8c0a-d18a-557198c897c4" [ 712.410190] env[62109]: _type = "Task" [ 712.410190] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.417959] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f96c29-ff43-8c0a-d18a-557198c897c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.449344] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116163, 'name': PowerOnVM_Task, 'duration_secs': 0.45633} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.449344] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 712.449533] env[62109]: INFO nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Took 7.32 seconds to spawn the instance on the hypervisor. [ 712.449713] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 712.450491] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b70ba2-8845-406d-8e5a-2df5547ff27c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.566522] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.567040] env[62109]: DEBUG nova.compute.manager [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 712.572203] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 32.600s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.795790] env[62109]: DEBUG nova.network.neutron [req-66b48d6d-9de5-42f2-a390-6f2ee18ad02c req-4aaf8c4c-0b8b-494b-ba89-d0925e52f329 service nova] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Updated VIF entry in instance network info cache for port 5ddcdbcd-b248-4185-acdb-ef37c8631a5c. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 712.796272] env[62109]: DEBUG nova.network.neutron [req-66b48d6d-9de5-42f2-a390-6f2ee18ad02c req-4aaf8c4c-0b8b-494b-ba89-d0925e52f329 service nova] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Updating instance_info_cache with network_info: [{"id": "5ddcdbcd-b248-4185-acdb-ef37c8631a5c", "address": "fa:16:3e:35:c2:b4", "network": {"id": "cb6862a6-f029-4a95-89ed-ef3cf74f038a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1323521961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "115ba619f3354bfea923af53d277b691", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ddcdbcd-b2", "ovs_interfaceid": "5ddcdbcd-b248-4185-acdb-ef37c8631a5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.924860] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f96c29-ff43-8c0a-d18a-557198c897c4, 'name': SearchDatastore_Task, 'duration_secs': 0.008933} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.925274] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.925508] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 712.925744] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.925886] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.926109] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 712.926531] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c188fc35-b5ba-448a-aeb0-d1c4bac1bb7c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.935401] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 712.935581] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 712.940169] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84e77e07-6d2a-43a8-a5f1-5df2ba6c15e7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.949169] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 712.949169] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d0603c-efe5-85f8-cc27-23b0803e573c" [ 712.949169] env[62109]: _type = "Task" [ 712.949169] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.958060] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d0603c-efe5-85f8-cc27-23b0803e573c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.970351] env[62109]: INFO nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Took 43.22 seconds to build instance. [ 712.975587] env[62109]: DEBUG nova.network.neutron [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Successfully updated port: 05de4996-e885-4ff3-9685-950d1d793e43 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 713.085601] env[62109]: DEBUG nova.compute.utils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 713.087262] env[62109]: DEBUG nova.compute.manager [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 713.087262] env[62109]: DEBUG nova.network.neutron [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 713.135522] env[62109]: DEBUG nova.policy [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '67a1245dbb50458ebda1a0a350def68b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ac84df552ee74053a00b8204aa781f3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 713.299780] env[62109]: DEBUG oslo_concurrency.lockutils [req-66b48d6d-9de5-42f2-a390-6f2ee18ad02c req-4aaf8c4c-0b8b-494b-ba89-d0925e52f329 service nova] Releasing lock "refresh_cache-46aa78cc-ea0a-4c1b-aadb-f2a4856c9371" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.438575] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4759bfe7-0a30-42f5-8661-983036377a34 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.446287] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4b9591-a963-438e-a0e9-dd7c543419ed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.459958] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d0603c-efe5-85f8-cc27-23b0803e573c, 'name': SearchDatastore_Task, 'duration_secs': 0.009361} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.486662] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "17ee49a9-d980-46c0-996e-6a43c80be434" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.690s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.487274] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "refresh_cache-d727d597-c4ac-426e-bdc3-fc4f73a3eac9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.487390] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "refresh_cache-d727d597-c4ac-426e-bdc3-fc4f73a3eac9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.487596] env[62109]: DEBUG nova.network.neutron [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 713.489186] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adf22ce9-4a06-4309-a0df-c41325df6c26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.493729] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e55d46d-a4e9-43d9-af83-17e11bfc1f32 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.498528] env[62109]: DEBUG nova.network.neutron [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Successfully created port: a43481f3-cacf-4bd2-9e74-4ca60b37da80 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 713.505018] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 713.505018] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d77150-a904-ac4a-7b9f-7b2158577686" [ 713.505018] env[62109]: _type = "Task" [ 713.505018] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.506229] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1182ae48-b48c-4055-add7-40861abdebb4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.522774] env[62109]: DEBUG nova.compute.provider_tree [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.527417] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d77150-a904-ac4a-7b9f-7b2158577686, 'name': SearchDatastore_Task, 'duration_secs': 0.009154} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.527841] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.528098] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371/46aa78cc-ea0a-4c1b-aadb-f2a4856c9371.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 713.528342] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a9236ba-0eb4-43d7-be38-7f970fad3279 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.534709] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 713.534709] env[62109]: value = "task-1116164" [ 713.534709] env[62109]: _type = "Task" [ 713.534709] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.542414] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116164, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.590098] env[62109]: DEBUG nova.compute.manager [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 713.997960] env[62109]: DEBUG nova.compute.manager [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 714.028159] env[62109]: DEBUG nova.network.neutron [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.030694] env[62109]: DEBUG nova.scheduler.client.report [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 714.049375] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497159} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.049627] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371/46aa78cc-ea0a-4c1b-aadb-f2a4856c9371.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 714.049836] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 714.050095] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6bc29ee-78a4-4afb-be45-5fea8e906b00 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.061022] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 714.061022] env[62109]: value = "task-1116165" [ 714.061022] env[62109]: _type = "Task" [ 714.061022] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.070652] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116165, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.102158] env[62109]: DEBUG nova.compute.manager [req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Received event network-vif-plugged-05de4996-e885-4ff3-9685-950d1d793e43 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 714.102158] env[62109]: DEBUG oslo_concurrency.lockutils [req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] Acquiring lock "d727d597-c4ac-426e-bdc3-fc4f73a3eac9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.102369] env[62109]: DEBUG oslo_concurrency.lockutils [req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] Lock "d727d597-c4ac-426e-bdc3-fc4f73a3eac9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.102558] env[62109]: DEBUG oslo_concurrency.lockutils [req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] Lock "d727d597-c4ac-426e-bdc3-fc4f73a3eac9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.102736] env[62109]: DEBUG nova.compute.manager [req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] No waiting events found dispatching network-vif-plugged-05de4996-e885-4ff3-9685-950d1d793e43 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 714.102926] env[62109]: WARNING nova.compute.manager [req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Received unexpected event network-vif-plugged-05de4996-e885-4ff3-9685-950d1d793e43 for instance with vm_state building and task_state spawning. [ 714.103565] env[62109]: DEBUG nova.compute.manager [req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Received event network-changed-05de4996-e885-4ff3-9685-950d1d793e43 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 714.106019] env[62109]: DEBUG nova.compute.manager [req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Refreshing instance network info cache due to event network-changed-05de4996-e885-4ff3-9685-950d1d793e43. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 714.106019] env[62109]: DEBUG oslo_concurrency.lockutils [req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] Acquiring lock "refresh_cache-d727d597-c4ac-426e-bdc3-fc4f73a3eac9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.177306] env[62109]: DEBUG nova.network.neutron [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Updating instance_info_cache with network_info: [{"id": "05de4996-e885-4ff3-9685-950d1d793e43", "address": "fa:16:3e:5a:cc:47", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05de4996-e8", "ovs_interfaceid": "05de4996-e885-4ff3-9685-950d1d793e43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.520372] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.535333] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.963s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.535935] env[62109]: ERROR nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port acb8c8a9-4641-44ff-a89f-f7d5f382b821, please check neutron logs for more information. 
[ 714.535935] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Traceback (most recent call last): [ 714.535935] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 714.535935] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] self.driver.spawn(context, instance, image_meta, [ 714.535935] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 714.535935] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] self._vmops.spawn(context, instance, image_meta, injected_files, [ 714.535935] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 714.535935] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] vm_ref = self.build_virtual_machine(instance, [ 714.535935] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 714.535935] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] vif_infos = vmwarevif.get_vif_info(self._session, [ 714.535935] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 714.536382] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] for vif in network_info: [ 714.536382] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 714.536382] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] return self._sync_wrapper(fn, *args, **kwargs) [ 714.536382] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 714.536382] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] self.wait() [ 714.536382] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 714.536382] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] self[:] = self._gt.wait() [ 714.536382] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 714.536382] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] return self._exit_event.wait() [ 714.536382] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 714.536382] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] result = hub.switch() [ 714.536382] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
714.536382] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] return self.greenlet.switch() [ 714.536755] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 714.536755] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] result = function(*args, **kwargs) [ 714.536755] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 714.536755] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] return func(*args, **kwargs) [ 714.536755] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 714.536755] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] raise e [ 714.536755] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 714.536755] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] nwinfo = self.network_api.allocate_for_instance( [ 714.536755] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 714.536755] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] created_port_ids = self._update_ports_for_instance( [ 714.536755] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 714.536755] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] with excutils.save_and_reraise_exception(): [ 714.536755] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.537150] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] self.force_reraise() [ 714.537150] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.537150] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] raise self.value [ 714.537150] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 714.537150] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] updated_port = self._update_port( [ 714.537150] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.537150] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] _ensure_no_port_binding_failure(port) [ 714.537150] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 714.537150] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] raise exception.PortBindingFailed(port_id=port['id']) [ 714.537150] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] nova.exception.PortBindingFailed: Binding failed for port acb8c8a9-4641-44ff-a89f-f7d5f382b821, please check neutron logs for more information. [ 714.537150] env[62109]: ERROR nova.compute.manager [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] [ 714.537892] env[62109]: DEBUG nova.compute.utils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Binding failed for port acb8c8a9-4641-44ff-a89f-f7d5f382b821, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 714.537892] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.905s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.537961] env[62109]: DEBUG nova.objects.instance [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 714.540322] env[62109]: DEBUG nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Build of instance 307cf522-173e-4bd8-8535-e4a6db6aa430 was re-scheduled: Binding failed for port acb8c8a9-4641-44ff-a89f-f7d5f382b821, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 714.540729] env[62109]: DEBUG nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 714.540939] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "refresh_cache-307cf522-173e-4bd8-8535-e4a6db6aa430" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.541092] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquired lock "refresh_cache-307cf522-173e-4bd8-8535-e4a6db6aa430" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.541246] env[62109]: DEBUG nova.network.neutron [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 714.568990] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116165, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067916} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.569398] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 714.569974] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6b1ad3-8092-45e3-89b8-1527d80aba2d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.591952] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371/46aa78cc-ea0a-4c1b-aadb-f2a4856c9371.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 714.592423] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b5ea543-a710-4f24-95c4-a850ea2629d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.607288] env[62109]: DEBUG nova.compute.manager [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 714.614110] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 714.614110] env[62109]: value = "task-1116166" [ 714.614110] env[62109]: _type = "Task" [ 714.614110] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.622447] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116166, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.630941] env[62109]: DEBUG nova.virt.hardware [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 714.631199] env[62109]: DEBUG nova.virt.hardware [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 714.631367] env[62109]: DEBUG nova.virt.hardware [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 714.631576] env[62109]: DEBUG nova.virt.hardware [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 714.631702] env[62109]: DEBUG nova.virt.hardware [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 714.631846] env[62109]: DEBUG nova.virt.hardware [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 714.632060] env[62109]: DEBUG nova.virt.hardware [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 714.632255] env[62109]: DEBUG nova.virt.hardware [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 714.632484] env[62109]: 
DEBUG nova.virt.hardware [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 714.632690] env[62109]: DEBUG nova.virt.hardware [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 714.632879] env[62109]: DEBUG nova.virt.hardware [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 714.633649] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac1861a-9b19-42fc-ac5d-201edf650a0e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.640857] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3c8bef-c5a6-479b-b49c-b26fc5d1d57b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.679490] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "refresh_cache-d727d597-c4ac-426e-bdc3-fc4f73a3eac9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.679822] env[62109]: DEBUG nova.compute.manager [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Instance network_info: |[{"id": "05de4996-e885-4ff3-9685-950d1d793e43", "address": "fa:16:3e:5a:cc:47", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05de4996-e8", "ovs_interfaceid": "05de4996-e885-4ff3-9685-950d1d793e43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 714.680125] env[62109]: DEBUG oslo_concurrency.lockutils 
[req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] Acquired lock "refresh_cache-d727d597-c4ac-426e-bdc3-fc4f73a3eac9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.680310] env[62109]: DEBUG nova.network.neutron [req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Refreshing network info cache for port 05de4996-e885-4ff3-9685-950d1d793e43 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 714.681486] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:cc:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6e940e5-e083-4238-973e-f1b4e2a3a5c7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05de4996-e885-4ff3-9685-950d1d793e43', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 714.688834] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Creating folder: Project (1ca12aa68e4b4d4d8cf1e3332deb44f4). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 714.692080] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34c0d8fa-6049-4d9f-a6ab-40e1b71e2a79 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.703453] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Created folder: Project (1ca12aa68e4b4d4d8cf1e3332deb44f4) in parent group-v244329. [ 714.703453] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Creating folder: Instances. Parent ref: group-v244350. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 714.703453] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d3d7502-8756-483a-8f37-bc407660994f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.711923] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Created folder: Instances in parent group-v244350. [ 714.713026] env[62109]: DEBUG oslo.service.loopingcall [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 714.713026] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 714.713026] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3277a89e-92e7-4b55-af05-0a2acfb1c7a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.734913] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 714.734913] env[62109]: value = "task-1116169" [ 714.734913] env[62109]: _type = "Task" [ 714.734913] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.742630] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116169, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.930023] env[62109]: DEBUG nova.network.neutron [req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Updated VIF entry in instance network info cache for port 05de4996-e885-4ff3-9685-950d1d793e43. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 714.930402] env[62109]: DEBUG nova.network.neutron [req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Updating instance_info_cache with network_info: [{"id": "05de4996-e885-4ff3-9685-950d1d793e43", "address": "fa:16:3e:5a:cc:47", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05de4996-e8", "ovs_interfaceid": "05de4996-e885-4ff3-9685-950d1d793e43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.064335] env[62109]: DEBUG nova.network.neutron [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 715.127237] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116166, 'name': ReconfigVM_Task, 'duration_secs': 0.297309} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.127237] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371/46aa78cc-ea0a-4c1b-aadb-f2a4856c9371.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 715.127237] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-347fe48f-1ca2-4cf8-a5b1-188d48a41bc8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.135318] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 715.135318] env[62109]: value = "task-1116170" [ 715.135318] env[62109]: _type = "Task" [ 715.135318] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.149315] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116170, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.154783] env[62109]: DEBUG nova.network.neutron [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.246408] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116169, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.256644] env[62109]: DEBUG nova.network.neutron [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Successfully updated port: a43481f3-cacf-4bd2-9e74-4ca60b37da80 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 715.440694] env[62109]: DEBUG oslo_concurrency.lockutils [req-7bbac425-85ca-4ff9-944f-a1fd0ed48de6 req-b6daa34f-0c07-415a-a5b6-e11b0faf444d service nova] Releasing lock "refresh_cache-d727d597-c4ac-426e-bdc3-fc4f73a3eac9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.550849] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9946cbdf-ad47-41ad-bf89-ff1f59c7fc24 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.551983] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.312s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.555876] env[62109]: INFO nova.compute.claims [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 715.651473] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116170, 'name': Rename_Task, 'duration_secs': 0.151102} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.651741] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 715.651982] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-925a3800-1334-441a-a30c-43af7234e071 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.658772] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Releasing lock "refresh_cache-307cf522-173e-4bd8-8535-e4a6db6aa430" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.658990] env[62109]: DEBUG nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 715.659184] env[62109]: DEBUG nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 715.659355] env[62109]: DEBUG nova.network.neutron [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 715.666489] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 715.666489] env[62109]: value = "task-1116171" [ 715.666489] env[62109]: _type = "Task" [ 715.666489] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.674217] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116171, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.676362] env[62109]: DEBUG nova.network.neutron [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 715.744911] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116169, 'name': CreateVM_Task, 'duration_secs': 0.510711} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.745164] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 715.745861] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.746040] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.746516] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 715.746784] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4497015c-d45b-4f49-9779-f8ae09d51225 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.751611] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 715.751611] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52da641a-f5db-4ab5-8b42-35cc292cdad7" [ 715.751611] env[62109]: _type = "Task" [ 715.751611] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.759613] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52da641a-f5db-4ab5-8b42-35cc292cdad7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.764241] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.765982] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.765982] env[62109]: DEBUG nova.network.neutron [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 716.145124] env[62109]: DEBUG nova.compute.manager [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Received event network-vif-plugged-a43481f3-cacf-4bd2-9e74-4ca60b37da80 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 716.145192] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] Acquiring lock "751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.145368] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] Lock "751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.145530] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] Lock "751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.145719] env[62109]: DEBUG nova.compute.manager [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] No waiting events found dispatching network-vif-plugged-a43481f3-cacf-4bd2-9e74-4ca60b37da80 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 716.145900] env[62109]: WARNING nova.compute.manager [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Received unexpected event network-vif-plugged-a43481f3-cacf-4bd2-9e74-4ca60b37da80 for instance with vm_state building and task_state spawning. 
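The "Acquiring lock ..." / "Lock ... acquired by ... :: waited Ns" / '"released" by ... :: held Ns' lines above are emitted by oslo.concurrency rather than by Nova-specific code. A minimal sketch of the two usage patterns behind them, with a hypothetical lock name and function rather than anything taken from Nova:

    from oslo_concurrency import lockutils

    # Decorator form: the wrapper ("inner" in lockutils.py) logs the
    # 'acquired by ... :: waited Ns' and '"released" by ... :: held Ns' lines.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        return 'claimed %s' % instance_uuid  # critical section

    # Context-manager form: logs the plain 'Acquiring lock ...' /
    # 'Acquired lock ...' / 'Releasing lock ...' lines (lock() in lockutils.py).
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # read or update the cached network info here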
[ 716.146088] env[62109]: DEBUG nova.compute.manager [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Received event network-changed-a43481f3-cacf-4bd2-9e74-4ca60b37da80 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 716.146354] env[62109]: DEBUG nova.compute.manager [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Refreshing instance network info cache due to event network-changed-a43481f3-cacf-4bd2-9e74-4ca60b37da80. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 716.146496] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] Acquiring lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.176282] env[62109]: DEBUG oslo_vmware.api [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116171, 'name': PowerOnVM_Task, 'duration_secs': 0.461426} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.176403] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 716.176597] env[62109]: INFO nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Took 6.67 seconds to spawn the instance on the hypervisor. 
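The PowerOnVM_Task sequence above ("Invoking VirtualMachine.PowerOnVM_Task", "Waiting for the task ... to complete", "progress is N%", "completed successfully") follows the usual oslo.vmware pattern: invoke the vSphere call through the API session, then block in wait_for_task, which polls and logs progress. A minimal sketch assuming a reachable vCenter; the endpoint, credentials, retry/poll values and vm_ref are placeholders (Nova takes the real ones from its [vmware] config section: host_ip, host_username, host_password):

    from oslo_vmware import api as vmware_api

    # Establishes a vCenter session on construction (placeholder values).
    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_on(vm_ref):
        # vm_ref: an existing VirtualMachine managed-object reference.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task, emitting the 'progress is N%' /
        # 'completed successfully' debug lines, and raises if the task fails.
        return session.wait_for_task(task)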
[ 716.176775] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 716.177544] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ec910d-a0bf-460a-8b02-732225afd0c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.180092] env[62109]: DEBUG nova.network.neutron [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.263712] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52da641a-f5db-4ab5-8b42-35cc292cdad7, 'name': SearchDatastore_Task, 'duration_secs': 0.009632} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.263780] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.263993] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 716.264275] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.264427] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.264606] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
716.264873] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a57649c-f987-404e-8445-a629edb05fd2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.273039] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 716.273248] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 716.274230] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-533cb53a-034f-471f-a509-daa1dcf14d4b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.279165] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 716.279165] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c2024d-59c4-0b53-c9bc-c795783c9e53" [ 716.279165] env[62109]: _type = "Task" [ 716.279165] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.287337] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c2024d-59c4-0b53-c9bc-c795783c9e53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.297528] env[62109]: DEBUG nova.network.neutron [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 716.513083] env[62109]: DEBUG nova.network.neutron [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Updating instance_info_cache with network_info: [{"id": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "address": "fa:16:3e:5c:b7:79", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa43481f3-ca", "ovs_interfaceid": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.682746] env[62109]: INFO nova.compute.manager [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 307cf522-173e-4bd8-8535-e4a6db6aa430] Took 1.02 seconds to deallocate network for instance. [ 716.696065] env[62109]: INFO nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Took 41.73 seconds to build instance. [ 716.790970] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c2024d-59c4-0b53-c9bc-c795783c9e53, 'name': SearchDatastore_Task, 'duration_secs': 0.013471} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.791748] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-342cc62f-721e-4bb8-9bfb-d32cef3613ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.798010] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 716.798010] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52941061-b2b5-63d0-df73-ba583c54f9dd" [ 716.798010] env[62109]: _type = "Task" [ 716.798010] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.805463] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52941061-b2b5-63d0-df73-ba583c54f9dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.936023] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2a09ce-142c-4d89-a7ea-950fd0333f79 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.943339] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30839cf3-aaa2-47e4-be90-f81f67c678f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.977641] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6003280d-2903-4b32-9f80-34b7835b6117 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.982428] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71f68c5-21bd-4a4e-8ff3-e638a253646e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.996073] env[62109]: DEBUG nova.compute.provider_tree [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.015759] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.016075] env[62109]: DEBUG nova.compute.manager [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Instance network_info: |[{"id": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "address": "fa:16:3e:5c:b7:79", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", 
"segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa43481f3-ca", "ovs_interfaceid": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 717.016359] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] Acquired lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.016532] env[62109]: DEBUG nova.network.neutron [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Refreshing network info cache for port a43481f3-cacf-4bd2-9e74-4ca60b37da80 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 717.018491] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:b7:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a43481f3-cacf-4bd2-9e74-4ca60b37da80', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 717.025990] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Creating folder: Project (ac84df552ee74053a00b8204aa781f3b). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 717.026729] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed166474-cfc4-4c9b-9ddf-2b7a1c1c0ae8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.036821] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Created folder: Project (ac84df552ee74053a00b8204aa781f3b) in parent group-v244329. [ 717.037012] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Creating folder: Instances. Parent ref: group-v244353. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 717.037235] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c20194b0-ade2-4846-bbac-d8e779cd7343 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.045996] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Created folder: Instances in parent group-v244353. 
[ 717.046336] env[62109]: DEBUG oslo.service.loopingcall [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 717.046577] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 717.046834] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6806d66e-1b05-4b21-9377-b7e21e40453b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.073439] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 717.073439] env[62109]: value = "task-1116174" [ 717.073439] env[62109]: _type = "Task" [ 717.073439] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.082298] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116174, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.199312] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "46aa78cc-ea0a-4c1b-aadb-f2a4856c9371" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 123.362s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.307955] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52941061-b2b5-63d0-df73-ba583c54f9dd, 'name': SearchDatastore_Task, 'duration_secs': 0.03238} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.308255] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.308507] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] d727d597-c4ac-426e-bdc3-fc4f73a3eac9/d727d597-c4ac-426e-bdc3-fc4f73a3eac9.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 717.308766] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f02fe2e-3a66-4f4d-a451-f8a4d1bc296d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.314923] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 717.314923] env[62109]: value = "task-1116175" [ 717.314923] env[62109]: _type = "Task" [ 717.314923] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.322722] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116175, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.500443] env[62109]: DEBUG nova.scheduler.client.report [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 717.584941] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116174, 'name': CreateVM_Task, 'duration_secs': 0.302796} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.585099] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 717.585756] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.585907] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.586265] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 717.587032] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-756b2f41-530c-4475-b75a-59453b2235e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.591858] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 717.591858] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525d86d9-8e3e-900e-555e-99410205543c" [ 717.591858] env[62109]: _type = "Task" [ 717.591858] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.602563] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525d86d9-8e3e-900e-555e-99410205543c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.703310] env[62109]: DEBUG nova.compute.manager [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 717.720662] env[62109]: INFO nova.scheduler.client.report [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Deleted allocations for instance 307cf522-173e-4bd8-8535-e4a6db6aa430 [ 717.824987] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116175, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.910049] env[62109]: DEBUG nova.network.neutron [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Updated VIF entry in instance network info cache for port a43481f3-cacf-4bd2-9e74-4ca60b37da80. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 717.910424] env[62109]: DEBUG nova.network.neutron [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Updating instance_info_cache with network_info: [{"id": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "address": "fa:16:3e:5c:b7:79", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa43481f3-ca", "ovs_interfaceid": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.007276] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.455s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.007992] env[62109]: DEBUG nova.compute.manager [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 718.010101] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 32.454s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.102189] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525d86d9-8e3e-900e-555e-99410205543c, 'name': SearchDatastore_Task, 'duration_secs': 0.020173} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.102189] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.102387] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.102623] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.102771] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.103454] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 718.103454] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5495d6d7-663a-4b67-96df-1a9328c48e9a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.119086] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 718.119086] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 718.119393] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d54a090-a596-4cee-ade3-3055a5edd09a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.124565] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 718.124565] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fdecc9-566d-64ad-e32c-37d1054a65f1" [ 718.124565] env[62109]: _type = "Task" [ 718.124565] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.132366] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fdecc9-566d-64ad-e32c-37d1054a65f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.232187] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c99c994-4664-47b8-b43a-85b2cbb316c9 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "307cf522-173e-4bd8-8535-e4a6db6aa430" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.183s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.232187] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.329303] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116175, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.412596] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d31c4ff-2044-41e4-9465-c686dc36b5df req-d79f782c-8687-4c90-943e-78aa056237b5 service nova] Releasing lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.514897] env[62109]: DEBUG nova.compute.utils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 718.519487] env[62109]: DEBUG nova.compute.manager [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 718.519487] env[62109]: DEBUG nova.network.neutron [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 718.580271] env[62109]: DEBUG nova.policy [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f740b520eb824f85a2471de72e80f0ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b01655734df4b719f823b78aa0f36c0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 718.637152] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fdecc9-566d-64ad-e32c-37d1054a65f1, 'name': SearchDatastore_Task, 'duration_secs': 0.045753} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.640120] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-048ccd93-f3b9-48e4-9b86-81e5bfebc48a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.645606] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 718.645606] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522d51c9-2884-90f3-8454-61a75cedf11f" [ 718.645606] env[62109]: _type = "Task" [ 718.645606] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.653906] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522d51c9-2884-90f3-8454-61a75cedf11f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.733577] env[62109]: DEBUG nova.compute.manager [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 718.826229] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116175, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.468588} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.829205] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] d727d597-c4ac-426e-bdc3-fc4f73a3eac9/d727d597-c4ac-426e-bdc3-fc4f73a3eac9.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 718.829627] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 718.830179] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fba25723-5370-473f-b2d0-805b97332727 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.840312] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 718.840312] env[62109]: value = "task-1116176" [ 718.840312] env[62109]: _type = "Task" [ 718.840312] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.853641] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116176, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.934455] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b97021-7fb9-4ad8-87fd-1057d29afd0b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.941742] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45449b5a-69f3-45f9-a963-e3b70abada70 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.971757] env[62109]: DEBUG nova.network.neutron [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Successfully created port: 024ef821-8029-4165-92f1-25cab3da46ce {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 718.974202] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383e9adf-c660-4644-bf2c-27d18f913b3d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.981982] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013f57b6-eab7-403f-8e31-d41a72b9b1a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.001816] env[62109]: DEBUG nova.compute.provider_tree [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.019395] env[62109]: DEBUG nova.compute.manager [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 719.156752] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522d51c9-2884-90f3-8454-61a75cedf11f, 'name': SearchDatastore_Task, 'duration_secs': 0.013564} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.157021] env[62109]: DEBUG oslo_concurrency.lockutils [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.157287] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba/751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 719.157551] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de9f35f7-dcb3-4e09-ab44-2a68ec3c5732 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.164390] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 719.164390] env[62109]: value = "task-1116177" [ 719.164390] env[62109]: _type = "Task" [ 719.164390] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.172201] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116177, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.256011] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.351670] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116176, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067002} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.351814] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 719.352900] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3abeda2e-ee65-4ac1-a595-32751441a537 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.375534] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] d727d597-c4ac-426e-bdc3-fc4f73a3eac9/d727d597-c4ac-426e-bdc3-fc4f73a3eac9.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 719.375882] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5cca863-bc6b-4ba5-8e57-252a0b9f652c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.397695] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 719.397695] env[62109]: value = "task-1116178" [ 719.397695] env[62109]: _type = "Task" [ 719.397695] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.407356] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116178, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.506758] env[62109]: DEBUG nova.scheduler.client.report [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 719.674877] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116177, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.907959] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116178, 'name': ReconfigVM_Task, 'duration_secs': 0.387547} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.908266] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Reconfigured VM instance instance-00000021 to attach disk [datastore1] d727d597-c4ac-426e-bdc3-fc4f73a3eac9/d727d597-c4ac-426e-bdc3-fc4f73a3eac9.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 719.908878] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2232dbef-d4fd-4ec8-8b82-996171c60074 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.915627] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 719.915627] env[62109]: value = "task-1116179" [ 719.915627] env[62109]: _type = "Task" [ 719.915627] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.926437] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116179, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.020927] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.010s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.021518] env[62109]: ERROR nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 515338fe-203e-4a77-8a19-931103ee66b6, please check neutron logs for more information. 
[ 720.021518] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Traceback (most recent call last):
[ 720.021518] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 720.021518] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] self.driver.spawn(context, instance, image_meta,
[ 720.021518] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn
[ 720.021518] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 720.021518] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 720.021518] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] vm_ref = self.build_virtual_machine(instance,
[ 720.021518] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 720.021518] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] vif_infos = vmwarevif.get_vif_info(self._session,
[ 720.021518] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 720.021880] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] for vif in network_info:
[ 720.021880] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 720.021880] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] return self._sync_wrapper(fn, *args, **kwargs)
[ 720.021880] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 720.021880] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] self.wait()
[ 720.021880] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 720.021880] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] self[:] = self._gt.wait()
[ 720.021880] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 720.021880] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] return self._exit_event.wait()
[ 720.021880] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 720.021880] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] current.throw(*self._exc)
[ 720.021880] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 720.021880] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] result = function(*args, **kwargs)
[ 720.022275] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 720.022275] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] return func(*args, **kwargs)
[ 720.022275] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 720.022275] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] raise e
[ 720.022275] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 720.022275] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] nwinfo = self.network_api.allocate_for_instance(
[ 720.022275] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance
[ 720.022275] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] created_port_ids = self._update_ports_for_instance(
[ 720.022275] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance
[ 720.022275] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] with excutils.save_and_reraise_exception():
[ 720.022275] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 720.022275] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] self.force_reraise()
[ 720.022275] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 720.022662] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] raise self.value
[ 720.022662] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance
[ 720.022662] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] updated_port = self._update_port(
[ 720.022662] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 720.022662] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] _ensure_no_port_binding_failure(port)
[ 720.022662] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 720.022662] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] raise exception.PortBindingFailed(port_id=port['id'])
[ 720.022662] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] nova.exception.PortBindingFailed: Binding failed for port 515338fe-203e-4a77-8a19-931103ee66b6, please check neutron logs for more information.
[ 720.022662] env[62109]: ERROR nova.compute.manager [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698]
[ 720.022662] env[62109]: DEBUG nova.compute.utils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Binding failed for port 515338fe-203e-4a77-8a19-931103ee66b6, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 720.023737] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.158s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 720.025416] env[62109]: INFO nova.compute.claims [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 720.028622] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Build of instance 02f52fdd-ece0-43a5-b7fd-be4172093698 was re-scheduled: Binding failed for port 515338fe-203e-4a77-8a19-931103ee66b6, please check neutron logs for more information.
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 720.029039] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 720.029312] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "refresh_cache-02f52fdd-ece0-43a5-b7fd-be4172093698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.029499] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquired lock "refresh_cache-02f52fdd-ece0-43a5-b7fd-be4172093698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.029698] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 720.031342] env[62109]: DEBUG nova.compute.manager [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 720.057757] env[62109]: DEBUG nova.virt.hardware [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 720.058036] env[62109]: DEBUG nova.virt.hardware [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 720.058257] env[62109]: DEBUG nova.virt.hardware [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 720.058520] env[62109]: DEBUG nova.virt.hardware [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 720.058708] env[62109]: DEBUG nova.virt.hardware [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 720.058871] env[62109]: DEBUG nova.virt.hardware [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 720.059095] env[62109]: DEBUG nova.virt.hardware [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 720.059254] env[62109]: DEBUG nova.virt.hardware [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 720.059417] env[62109]: DEBUG nova.virt.hardware [None 
req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 720.059578] env[62109]: DEBUG nova.virt.hardware [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 720.059749] env[62109]: DEBUG nova.virt.hardware [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 720.060661] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036159ee-cd78-487e-865e-69d6faee5408 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.071261] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b40c4c2-b593-4e4f-b822-1792f8197c5f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.175889] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116177, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.426744] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116179, 'name': Rename_Task, 'duration_secs': 0.258745} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.427037] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 720.427286] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a0fed93-ca84-4a01-8cba-0fe250a7e56c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.434062] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 720.434062] env[62109]: value = "task-1116180" [ 720.434062] env[62109]: _type = "Task" [ 720.434062] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.443941] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116180, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.554585] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 720.634784] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.679245] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116177, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.511286} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.679245] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba/751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 720.679245] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 720.679245] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4b83111-58e7-4a9d-982a-35a7955ec22c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.685334] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 720.685334] env[62109]: value = "task-1116181" [ 720.685334] env[62109]: _type = "Task" [ 720.685334] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.693814] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116181, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.876196] env[62109]: DEBUG nova.compute.manager [req-9fe22ee8-ebcc-4c0e-921d-5b3b0b8ffc3e req-3c3116c5-c01e-4968-8d60-510d5f75f981 service nova] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Received event network-vif-plugged-024ef821-8029-4165-92f1-25cab3da46ce {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 720.876321] env[62109]: DEBUG oslo_concurrency.lockutils [req-9fe22ee8-ebcc-4c0e-921d-5b3b0b8ffc3e req-3c3116c5-c01e-4968-8d60-510d5f75f981 service nova] Acquiring lock "f6d3a50c-bcc3-4a6f-969f-4e629646f427-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.876530] env[62109]: DEBUG oslo_concurrency.lockutils [req-9fe22ee8-ebcc-4c0e-921d-5b3b0b8ffc3e req-3c3116c5-c01e-4968-8d60-510d5f75f981 service nova] Lock "f6d3a50c-bcc3-4a6f-969f-4e629646f427-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.876697] env[62109]: DEBUG oslo_concurrency.lockutils [req-9fe22ee8-ebcc-4c0e-921d-5b3b0b8ffc3e req-3c3116c5-c01e-4968-8d60-510d5f75f981 service nova] Lock "f6d3a50c-bcc3-4a6f-969f-4e629646f427-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.876866] env[62109]: DEBUG nova.compute.manager [req-9fe22ee8-ebcc-4c0e-921d-5b3b0b8ffc3e req-3c3116c5-c01e-4968-8d60-510d5f75f981 service nova] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] No waiting events found dispatching network-vif-plugged-024ef821-8029-4165-92f1-25cab3da46ce {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 720.877292] env[62109]: WARNING nova.compute.manager [req-9fe22ee8-ebcc-4c0e-921d-5b3b0b8ffc3e req-3c3116c5-c01e-4968-8d60-510d5f75f981 service nova] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Received unexpected event network-vif-plugged-024ef821-8029-4165-92f1-25cab3da46ce for instance with vm_state building and task_state spawning. [ 720.948828] env[62109]: DEBUG oslo_vmware.api [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116180, 'name': PowerOnVM_Task, 'duration_secs': 0.491116} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.948828] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 720.948828] env[62109]: INFO nova.compute.manager [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Took 8.91 seconds to spawn the instance on the hypervisor. [ 720.948828] env[62109]: DEBUG nova.compute.manager [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 720.949530] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93773d6d-5b84-47dd-912b-77fa8eaadf9f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.078065] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "342b7069-22fb-4934-9ec3-8ecbc987696e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.078065] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.108563] env[62109]: DEBUG nova.network.neutron [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Successfully updated port: 024ef821-8029-4165-92f1-25cab3da46ce {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 721.135470] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Releasing lock "refresh_cache-02f52fdd-ece0-43a5-b7fd-be4172093698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.135690] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 721.135866] env[62109]: DEBUG nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 721.136045] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 721.162655] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 721.198179] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116181, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066658} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.198771] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 721.200358] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1210fa2d-0faa-46e2-af4e-cdd228e6f651 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.226425] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba/751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 721.229464] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b811b00f-6c90-40e9-a8b5-a5c556d0df2b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.249912] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 721.249912] env[62109]: value = "task-1116182" [ 721.249912] env[62109]: _type = "Task" [ 721.249912] env[62109]: } to complete. 
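The "Waiting for the task" / "progress is N%" / "completed successfully" sequence around each task-111618x entry is a plain poll loop over a vCenter task handle. A rough sketch of that pattern, not the actual oslo.vmware interface (get_task_info(), POLL_INTERVAL and the state names are assumptions):

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; illustrative value

    def wait_for_task(session, task_ref):
        """Poll a vCenter task until it leaves the running state."""
        while True:
            info = session.get_task_info(task_ref)   # assumed helper
            if info.state == 'running':
                print(f"Task {task_ref}: progress is {info.progress}%")
                time.sleep(POLL_INTERVAL)
                continue
            if info.state == 'success':
                print(f"Task {task_ref} completed successfully")
                return info.result
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
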
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.261640] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116182, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.470579] env[62109]: INFO nova.compute.manager [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Took 45.89 seconds to build instance. [ 721.490530] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e99f61a-93ae-4260-9a5a-329e09206959 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.499451] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b3795b-f802-4da3-8c24-515baf1bf991 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.533269] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f113f18-eaca-4095-b739-840219d40d09 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.540979] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c88b5d3-7988-4f8f-b12f-b1f1f5a82e83 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.554450] env[62109]: DEBUG nova.compute.provider_tree [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.611358] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Acquiring lock "refresh_cache-f6d3a50c-bcc3-4a6f-969f-4e629646f427" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.611967] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Acquired lock "refresh_cache-f6d3a50c-bcc3-4a6f-969f-4e629646f427" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.611967] env[62109]: DEBUG nova.network.neutron [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 721.665425] env[62109]: DEBUG nova.network.neutron [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 
tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.760534] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116182, 'name': ReconfigVM_Task, 'duration_secs': 0.271784} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.760534] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Reconfigured VM instance instance-00000022 to attach disk [datastore2] 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba/751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 721.761181] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c24ae12-968f-4f74-9bcb-b786c5951b48 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.767671] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 721.767671] env[62109]: value = "task-1116183" [ 721.767671] env[62109]: _type = "Task" [ 721.767671] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.777472] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116183, 'name': Rename_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.972351] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d559cbdc-3287-48b1-b081-339f349df96e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "d727d597-c4ac-426e-bdc3-fc4f73a3eac9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.562s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.057263] env[62109]: DEBUG nova.scheduler.client.report [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 722.166211] env[62109]: DEBUG nova.network.neutron [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 722.168337] env[62109]: INFO nova.compute.manager [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 02f52fdd-ece0-43a5-b7fd-be4172093698] Took 1.03 seconds to deallocate network for instance. [ 722.199908] env[62109]: INFO nova.compute.manager [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Rebuilding instance [ 722.254792] env[62109]: DEBUG nova.compute.manager [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 722.255756] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77fd721f-6be6-4e70-a724-72f28f782722 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.281704] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116183, 'name': Rename_Task, 'duration_secs': 0.149877} completed successfully. 
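The inventory payload above implies the usable capacity placement schedules against, via the standard (total - reserved) * allocation_ratio formula; max_unit additionally caps what a single allocation may request (16 VCPU, 65530 MB, 170 GB here). Restated as a quick calculation:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    usable = {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
              for rc, v in inventory.items()}
    print(usable)  # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
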
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.281704] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 722.281704] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39de2014-b52c-43b9-ae64-9b4188bc6ad2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.288269] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 722.288269] env[62109]: value = "task-1116184" [ 722.288269] env[62109]: _type = "Task" [ 722.288269] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.298286] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116184, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.400669] env[62109]: DEBUG nova.network.neutron [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Updating instance_info_cache with network_info: [{"id": "024ef821-8029-4165-92f1-25cab3da46ce", "address": "fa:16:3e:ec:3a:cd", "network": {"id": "17199f5a-8355-48b7-8798-f4d87291f90e", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1177748950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b01655734df4b719f823b78aa0f36c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8800a981-a89e-42e4-8be9-cace419ba9cb", "external-id": "nsx-vlan-transportzone-962", "segmentation_id": 962, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap024ef821-80", "ovs_interfaceid": "024ef821-8029-4165-92f1-25cab3da46ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.475530] env[62109]: DEBUG nova.compute.manager [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Starting instance... 
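The network_info blob written to the instance cache above is a list of VIF dicts; the fields needed to recover each port's MAC and fixed IPv4 address sit a few levels down. A trimmed-down reading of the same structure, keeping only those fields:

    network_info = [{
        "id": "024ef821-8029-4165-92f1-25cab3da46ce",
        "address": "fa:16:3e:ec:3a:cd",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4}],
            }],
        },
    }]

    for vif in network_info:
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], fixed)
    # 024ef821-8029-4165-92f1-25cab3da46ce fa:16:3e:ec:3a:cd ['192.168.128.7']
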
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 722.562935] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.539s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.563775] env[62109]: DEBUG nova.compute.manager [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 722.567018] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.707s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.568553] env[62109]: INFO nova.compute.claims [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 722.777549] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 722.777964] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cfd83da8-2a7b-4d2a-97d9-a83c32ada049 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.786181] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 722.786181] env[62109]: value = "task-1116185" [ 722.786181] env[62109]: _type = "Task" [ 722.786181] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.801766] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116185, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.805247] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116184, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.903804] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Releasing lock "refresh_cache-f6d3a50c-bcc3-4a6f-969f-4e629646f427" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.904255] env[62109]: DEBUG nova.compute.manager [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Instance network_info: |[{"id": "024ef821-8029-4165-92f1-25cab3da46ce", "address": "fa:16:3e:ec:3a:cd", "network": {"id": "17199f5a-8355-48b7-8798-f4d87291f90e", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1177748950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b01655734df4b719f823b78aa0f36c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8800a981-a89e-42e4-8be9-cace419ba9cb", "external-id": "nsx-vlan-transportzone-962", "segmentation_id": 962, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap024ef821-80", "ovs_interfaceid": "024ef821-8029-4165-92f1-25cab3da46ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 722.905025] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:3a:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8800a981-a89e-42e4-8be9-cace419ba9cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '024ef821-8029-4165-92f1-25cab3da46ce', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 722.917488] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Creating folder: Project (5b01655734df4b719f823b78aa0f36c0). Parent ref: group-v244329. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 722.917488] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fcf68c88-a8e1-4e8f-a979-b1509cc94f47 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.928526] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Created folder: Project (5b01655734df4b719f823b78aa0f36c0) in parent group-v244329. [ 722.928697] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Creating folder: Instances. Parent ref: group-v244356. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 722.928896] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a12e7b3-527d-4ff3-8e25-90454bb4fb20 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.938291] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Created folder: Instances in parent group-v244356. [ 722.938983] env[62109]: DEBUG oslo.service.loopingcall [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 722.938983] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 722.938983] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca6cffeb-555b-4e94-8f1d-8da5ca13786c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.958745] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 722.958745] env[62109]: value = "task-1116188" [ 722.958745] env[62109]: _type = "Task" [ 722.958745] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.967816] env[62109]: DEBUG nova.compute.manager [req-69b34180-4b7d-4d29-a721-61d05ae6d195 req-49d01351-8a57-4b40-adfb-0e80ec6c4477 service nova] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Received event network-changed-024ef821-8029-4165-92f1-25cab3da46ce {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 722.968041] env[62109]: DEBUG nova.compute.manager [req-69b34180-4b7d-4d29-a721-61d05ae6d195 req-49d01351-8a57-4b40-adfb-0e80ec6c4477 service nova] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Refreshing instance network info cache due to event network-changed-024ef821-8029-4165-92f1-25cab3da46ce. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 722.968316] env[62109]: DEBUG oslo_concurrency.lockutils [req-69b34180-4b7d-4d29-a721-61d05ae6d195 req-49d01351-8a57-4b40-adfb-0e80ec6c4477 service nova] Acquiring lock "refresh_cache-f6d3a50c-bcc3-4a6f-969f-4e629646f427" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.968438] env[62109]: DEBUG oslo_concurrency.lockutils [req-69b34180-4b7d-4d29-a721-61d05ae6d195 req-49d01351-8a57-4b40-adfb-0e80ec6c4477 service nova] Acquired lock "refresh_cache-f6d3a50c-bcc3-4a6f-969f-4e629646f427" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.968666] env[62109]: DEBUG nova.network.neutron [req-69b34180-4b7d-4d29-a721-61d05ae6d195 req-49d01351-8a57-4b40-adfb-0e80ec6c4477 service nova] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Refreshing network info cache for port 024ef821-8029-4165-92f1-25cab3da46ce {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 722.973525] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116188, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.000485] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.068655] env[62109]: DEBUG nova.compute.utils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 723.070286] env[62109]: DEBUG nova.compute.manager [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 723.070367] env[62109]: DEBUG nova.network.neutron [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 723.129578] env[62109]: DEBUG nova.policy [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2f1f034e76ac41ab86dd61b8efc124c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47687318ee304ac0a32de02f47070193', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 723.202717] env[62109]: INFO nova.scheduler.client.report [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Deleted allocations for instance 02f52fdd-ece0-43a5-b7fd-be4172093698 [ 723.299061] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116185, 'name': PowerOffVM_Task, 'duration_secs': 0.203809} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.299906] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 723.303018] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 723.303018] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fc24a1-b3ae-40cc-8caa-8af40a9ed2d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.307447] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116184, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.313165] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 723.313165] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e6693fff-ca1b-4904-b916-f1ae857d1df6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.379024] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 723.379024] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 723.379024] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleting the datastore file [datastore1] d727d597-c4ac-426e-bdc3-fc4f73a3eac9 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 723.379024] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ff63dbd-8fb8-4dc3-9576-9b67e52caf84 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.383935] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 723.383935] env[62109]: value = "task-1116190" [ 723.383935] env[62109]: _type = "Task" [ 723.383935] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.392939] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116190, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.469407] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116188, 'name': CreateVM_Task, 'duration_secs': 0.299745} completed successfully. 
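The destroy path interleaved above runs in a fixed order in this trace: PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task on the instance directory. Sketched with an assumed session.call() helper rather than the real vmops code:

    def destroy_instance(session, vm_ref, ds_path):
        # Order as seen in the log: stop the guest, drop it from vCenter
        # inventory, then remove its files from the datastore.
        session.call('PowerOffVM_Task', vm_ref)            # task, polled
        session.call('UnregisterVM', vm_ref)               # synchronous call
        session.call('DeleteDatastoreFile_Task', ds_path)  # task, polled
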
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.469812] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 723.470649] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.470928] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.471360] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 723.471705] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bf175c0-002b-469a-9fcb-5d4fc6d048a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.477729] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 723.477729] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5278ad5a-2bf4-ec1b-f3d9-cdbc60586d6d" [ 723.477729] env[62109]: _type = "Task" [ 723.477729] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.485899] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5278ad5a-2bf4-ec1b-f3d9-cdbc60586d6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.573971] env[62109]: DEBUG nova.compute.manager [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 723.655329] env[62109]: DEBUG nova.network.neutron [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Successfully created port: 4fbac6b8-6bfd-4785-aed0-7407c636e189 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 723.713109] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e1789a90-bbb9-4817-8934-b2a09c055ea1 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "02f52fdd-ece0-43a5-b7fd-be4172093698" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 129.962s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.735750] env[62109]: DEBUG nova.network.neutron [req-69b34180-4b7d-4d29-a721-61d05ae6d195 req-49d01351-8a57-4b40-adfb-0e80ec6c4477 service nova] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Updated VIF entry in instance network info cache for port 024ef821-8029-4165-92f1-25cab3da46ce. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 723.736516] env[62109]: DEBUG nova.network.neutron [req-69b34180-4b7d-4d29-a721-61d05ae6d195 req-49d01351-8a57-4b40-adfb-0e80ec6c4477 service nova] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Updating instance_info_cache with network_info: [{"id": "024ef821-8029-4165-92f1-25cab3da46ce", "address": "fa:16:3e:ec:3a:cd", "network": {"id": "17199f5a-8355-48b7-8798-f4d87291f90e", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1177748950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b01655734df4b719f823b78aa0f36c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8800a981-a89e-42e4-8be9-cace419ba9cb", "external-id": "nsx-vlan-transportzone-962", "segmentation_id": 962, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap024ef821-80", "ovs_interfaceid": "024ef821-8029-4165-92f1-25cab3da46ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.802239] env[62109]: DEBUG oslo_vmware.api [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116184, 'name': PowerOnVM_Task, 'duration_secs': 1.036152} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.802794] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 723.803112] env[62109]: INFO nova.compute.manager [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Took 9.20 seconds to spawn the instance on the hypervisor. [ 723.803414] env[62109]: DEBUG nova.compute.manager [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 723.807259] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108a709b-2925-4a25-8adb-488396af08b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.898414] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116190, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167817} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.898692] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 723.898873] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 723.899056] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 723.988807] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5278ad5a-2bf4-ec1b-f3d9-cdbc60586d6d, 'name': SearchDatastore_Task, 'duration_secs': 0.017941} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.989177] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.989452] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 723.989722] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.989900] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.990151] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 723.990679] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec28c042-28e9-4b8a-9e1d-53bee1e66a36 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.002028] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 724.002028] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Folder [datastore2] devstack-image-cache_base created. 
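The "Processing image", datastore-cache lock and SearchDatastore_Task entries above reflect the usual cache-or-fetch pattern for the base VMDK. A condensed sketch under assumed helpers (datastore_search() and fetch_image() are stand-ins, not Nova code; only lockutils.lock is the real API):

    from oslo_concurrency import lockutils

    def datastore_search(session, path):
        """Assumed stand-in for the SearchDatastore_Task round trip."""
        return False

    def fetch_image(session, image_id, path):
        """Assumed stand-in for downloading the image into the cache."""

    def ensure_cached_image(session, image_id):
        cache_vmdk = (f"[datastore2] devstack-image-cache_base/"
                      f"{image_id}/{image_id}.vmdk")
        # Hold the same per-path lock seen in the log so only one request
        # downloads the image while others wait and then reuse it.
        with lockutils.lock(cache_vmdk):
            if not datastore_search(session, cache_vmdk):
                fetch_image(session, image_id, cache_vmdk)
            return cache_vmdk
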
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 724.003270] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9b60374-3aa9-4c85-96af-f3a8e0c489b0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.011761] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 724.011761] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e64249-1a51-b388-96bb-83ab9f618f27" [ 724.011761] env[62109]: _type = "Task" [ 724.011761] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.022297] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e64249-1a51-b388-96bb-83ab9f618f27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.037073] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e12ba8-bcf7-4d6b-b19b-ffa6c0de4245 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.043892] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a109ecd6-98e5-4006-a0b8-6c79b3419a98 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.074655] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffb5928-2b43-4481-8776-ede50f8b9363 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.086117] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd736769-b7b2-4679-90ca-f3121fdedc38 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.101420] env[62109]: DEBUG nova.compute.provider_tree [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.219777] env[62109]: DEBUG nova.compute.manager [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 724.241445] env[62109]: DEBUG oslo_concurrency.lockutils [req-69b34180-4b7d-4d29-a721-61d05ae6d195 req-49d01351-8a57-4b40-adfb-0e80ec6c4477 service nova] Releasing lock "refresh_cache-f6d3a50c-bcc3-4a6f-969f-4e629646f427" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.326370] env[62109]: INFO nova.compute.manager [None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Took 46.76 seconds to build instance. [ 724.375954] env[62109]: DEBUG oslo_concurrency.lockutils [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "17ee49a9-d980-46c0-996e-6a43c80be434" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.376372] env[62109]: DEBUG oslo_concurrency.lockutils [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "17ee49a9-d980-46c0-996e-6a43c80be434" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.376749] env[62109]: DEBUG oslo_concurrency.lockutils [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "17ee49a9-d980-46c0-996e-6a43c80be434-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.377009] env[62109]: DEBUG oslo_concurrency.lockutils [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "17ee49a9-d980-46c0-996e-6a43c80be434-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.377236] env[62109]: DEBUG oslo_concurrency.lockutils [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "17ee49a9-d980-46c0-996e-6a43c80be434-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.379485] env[62109]: INFO nova.compute.manager [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Terminating instance [ 724.383678] env[62109]: DEBUG nova.compute.manager [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 
17ee49a9-d980-46c0-996e-6a43c80be434] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 724.385025] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 724.385025] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8cede7b-0f55-4e58-b1b5-c506644252bf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.392877] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 724.394395] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5998c735-7799-4ca0-96b1-3bc640d32ee1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.399253] env[62109]: DEBUG oslo_vmware.api [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 724.399253] env[62109]: value = "task-1116191" [ 724.399253] env[62109]: _type = "Task" [ 724.399253] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.412015] env[62109]: DEBUG oslo_vmware.api [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116191, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.500928] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "46aa78cc-ea0a-4c1b-aadb-f2a4856c9371" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.501227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "46aa78cc-ea0a-4c1b-aadb-f2a4856c9371" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.501488] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "46aa78cc-ea0a-4c1b-aadb-f2a4856c9371-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.501676] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "46aa78cc-ea0a-4c1b-aadb-f2a4856c9371-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.501904] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "46aa78cc-ea0a-4c1b-aadb-f2a4856c9371-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.504062] env[62109]: INFO nova.compute.manager [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Terminating instance [ 724.506072] env[62109]: DEBUG nova.compute.manager [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 724.506308] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 724.507243] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e1ffbc-927a-4fd9-a9cb-9791f757f576 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.518206] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 724.521147] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2868314a-9a22-418c-ae2e-434c86ada0c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.526025] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e64249-1a51-b388-96bb-83ab9f618f27, 'name': SearchDatastore_Task, 'duration_secs': 0.012429} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.528207] env[62109]: DEBUG oslo_vmware.api [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 724.528207] env[62109]: value = "task-1116192" [ 724.528207] env[62109]: _type = "Task" [ 724.528207] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.528675] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8d494ee-320b-4ff7-ad9c-21fbc9d05e79 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.536561] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 724.536561] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a089f6-ccfc-a248-8f3f-aa1915aac75c" [ 724.536561] env[62109]: _type = "Task" [ 724.536561] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.540053] env[62109]: DEBUG oslo_vmware.api [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116192, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.548044] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a089f6-ccfc-a248-8f3f-aa1915aac75c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.586223] env[62109]: DEBUG nova.compute.manager [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 724.604604] env[62109]: DEBUG nova.scheduler.client.report [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 724.618059] env[62109]: DEBUG nova.virt.hardware [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 724.618356] env[62109]: DEBUG nova.virt.hardware [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 724.618509] env[62109]: DEBUG nova.virt.hardware [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.618689] env[62109]: DEBUG nova.virt.hardware [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Flavor 
pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 724.618848] env[62109]: DEBUG nova.virt.hardware [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.618984] env[62109]: DEBUG nova.virt.hardware [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 724.619238] env[62109]: DEBUG nova.virt.hardware [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 724.619400] env[62109]: DEBUG nova.virt.hardware [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 724.619990] env[62109]: DEBUG nova.virt.hardware [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 724.619990] env[62109]: DEBUG nova.virt.hardware [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 724.619990] env[62109]: DEBUG nova.virt.hardware [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 724.621016] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14090393-6c0e-4862-a98c-89726dc63a41 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.630027] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eadd250-acc7-4072-b821-e6032298494c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.744031] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.829327] env[62109]: DEBUG oslo_concurrency.lockutils 
[None req-128f859a-d37b-4bf5-8cd3-5baba1c89c11 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.475s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.913478] env[62109]: DEBUG oslo_vmware.api [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116191, 'name': PowerOffVM_Task, 'duration_secs': 0.206206} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.913742] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 724.913909] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 724.914165] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eef61ddd-d1d0-4ccb-b37d-4b0421ea5332 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.930070] env[62109]: DEBUG nova.virt.hardware [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 724.930329] env[62109]: DEBUG nova.virt.hardware [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 724.930487] env[62109]: DEBUG nova.virt.hardware [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.930665] 
env[62109]: DEBUG nova.virt.hardware [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 724.930810] env[62109]: DEBUG nova.virt.hardware [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.930956] env[62109]: DEBUG nova.virt.hardware [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 724.931241] env[62109]: DEBUG nova.virt.hardware [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 724.931436] env[62109]: DEBUG nova.virt.hardware [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 724.931583] env[62109]: DEBUG nova.virt.hardware [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 724.931743] env[62109]: DEBUG nova.virt.hardware [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 724.931949] env[62109]: DEBUG nova.virt.hardware [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 724.932833] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9a4b94-ea1b-420a-a0ea-5ef9b6129e33 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.941546] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1deb86-0c3b-4095-9eac-64098bd1663f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.956242] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Instance 
VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:cc:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6e940e5-e083-4238-973e-f1b4e2a3a5c7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05de4996-e885-4ff3-9685-950d1d793e43', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 724.964314] env[62109]: DEBUG oslo.service.loopingcall [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 724.964314] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 724.964547] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-221ac7c9-cc68-4118-b350-7a69acaa7b69 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.980265] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 724.980473] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 724.981346] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Deleting the datastore file [datastore1] 17ee49a9-d980-46c0-996e-6a43c80be434 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 724.981346] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7dd5cbb3-9b17-412d-bc90-ad0439699bc3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.985266] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 724.985266] env[62109]: value = "task-1116194" [ 724.985266] env[62109]: _type = "Task" [ 724.985266] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.992100] env[62109]: DEBUG oslo_vmware.api [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 724.992100] env[62109]: value = "task-1116195" [ 724.992100] env[62109]: _type = "Task" [ 724.992100] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.996218] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116194, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.999227] env[62109]: DEBUG oslo_vmware.api [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116195, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.042230] env[62109]: DEBUG oslo_vmware.api [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116192, 'name': PowerOffVM_Task, 'duration_secs': 0.211808} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.045928] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 725.046209] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 725.046513] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3eec8cdd-56c9-42eb-a308-63d6cf249171 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.053776] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a089f6-ccfc-a248-8f3f-aa1915aac75c, 'name': SearchDatastore_Task, 'duration_secs': 0.010102} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.054199] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.054396] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] f6d3a50c-bcc3-4a6f-969f-4e629646f427/f6d3a50c-bcc3-4a6f-969f-4e629646f427.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 725.054753] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f242318-dffe-44ad-913c-5fb31d27a979 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.062425] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 725.062425] env[62109]: value = "task-1116197" [ 725.062425] env[62109]: _type = "Task" [ 725.062425] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.069317] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116197, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.112133] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.545s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.113036] env[62109]: DEBUG nova.compute.manager [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 725.116167] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.025s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.116437] env[62109]: DEBUG nova.objects.instance [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 725.126134] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 725.126134] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 725.126134] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Deleting the datastore file [datastore1] 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 725.126134] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb3e3708-1f4e-4fe4-9f3f-13617b5176e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.135022] env[62109]: DEBUG oslo_vmware.api [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for the task: (returnval){ [ 725.135022] env[62109]: value = "task-1116198" [ 725.135022] env[62109]: _type = "Task" [ 725.135022] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.142409] env[62109]: DEBUG oslo_vmware.api [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116198, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.222240] env[62109]: DEBUG nova.compute.manager [req-06412376-b1d1-4dc8-9609-1d244c49eec2 req-ca3c1459-47b3-4d3a-9de2-cf289a214b93 service nova] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Received event network-vif-plugged-4fbac6b8-6bfd-4785-aed0-7407c636e189 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 725.222240] env[62109]: DEBUG oslo_concurrency.lockutils [req-06412376-b1d1-4dc8-9609-1d244c49eec2 req-ca3c1459-47b3-4d3a-9de2-cf289a214b93 service nova] Acquiring lock "8d9a7696-0465-4895-9ce8-4b4b8b2ca59e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.222240] env[62109]: DEBUG oslo_concurrency.lockutils [req-06412376-b1d1-4dc8-9609-1d244c49eec2 req-ca3c1459-47b3-4d3a-9de2-cf289a214b93 service nova] Lock "8d9a7696-0465-4895-9ce8-4b4b8b2ca59e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.223156] env[62109]: DEBUG oslo_concurrency.lockutils [req-06412376-b1d1-4dc8-9609-1d244c49eec2 req-ca3c1459-47b3-4d3a-9de2-cf289a214b93 service nova] Lock "8d9a7696-0465-4895-9ce8-4b4b8b2ca59e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.223574] env[62109]: DEBUG nova.compute.manager [req-06412376-b1d1-4dc8-9609-1d244c49eec2 req-ca3c1459-47b3-4d3a-9de2-cf289a214b93 service nova] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] No waiting events found dispatching network-vif-plugged-4fbac6b8-6bfd-4785-aed0-7407c636e189 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 725.224459] env[62109]: WARNING nova.compute.manager [req-06412376-b1d1-4dc8-9609-1d244c49eec2 req-ca3c1459-47b3-4d3a-9de2-cf289a214b93 service nova] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Received unexpected event network-vif-plugged-4fbac6b8-6bfd-4785-aed0-7407c636e189 for instance with vm_state building and task_state spawning. [ 725.335317] env[62109]: DEBUG nova.compute.manager [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 725.452147] env[62109]: DEBUG nova.network.neutron [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Successfully updated port: 4fbac6b8-6bfd-4785-aed0-7407c636e189 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 725.506426] env[62109]: DEBUG oslo_vmware.api [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116195, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148519} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.513792] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 725.514284] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 725.514686] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 725.514992] env[62109]: INFO nova.compute.manager [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Took 1.13 seconds to destroy the instance on the hypervisor. [ 725.515493] env[62109]: DEBUG oslo.service.loopingcall [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.515794] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116194, 'name': CreateVM_Task, 'duration_secs': 0.321032} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.516103] env[62109]: DEBUG nova.compute.manager [-] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 725.516296] env[62109]: DEBUG nova.network.neutron [-] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 725.518489] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 725.518919] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.519105] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.519648] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 725.520467] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a77baff6-d3a0-4598-b277-05c2f3fa9f75 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.526823] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 725.526823] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f6917a-3b5b-bf10-e63a-381a5e0f3b4b" [ 725.526823] env[62109]: _type = "Task" [ 725.526823] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.535805] env[62109]: DEBUG nova.compute.manager [req-ccde68ed-bbc3-40fb-adb0-77521de551cc req-404dd246-fb26-4d5d-a113-67cba954b8dd service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Received event network-changed-a43481f3-cacf-4bd2-9e74-4ca60b37da80 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 725.535990] env[62109]: DEBUG nova.compute.manager [req-ccde68ed-bbc3-40fb-adb0-77521de551cc req-404dd246-fb26-4d5d-a113-67cba954b8dd service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Refreshing instance network info cache due to event network-changed-a43481f3-cacf-4bd2-9e74-4ca60b37da80. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 725.536211] env[62109]: DEBUG oslo_concurrency.lockutils [req-ccde68ed-bbc3-40fb-adb0-77521de551cc req-404dd246-fb26-4d5d-a113-67cba954b8dd service nova] Acquiring lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.536346] env[62109]: DEBUG oslo_concurrency.lockutils [req-ccde68ed-bbc3-40fb-adb0-77521de551cc req-404dd246-fb26-4d5d-a113-67cba954b8dd service nova] Acquired lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.536495] env[62109]: DEBUG nova.network.neutron [req-ccde68ed-bbc3-40fb-adb0-77521de551cc req-404dd246-fb26-4d5d-a113-67cba954b8dd service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Refreshing network info cache for port a43481f3-cacf-4bd2-9e74-4ca60b37da80 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 725.540944] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f6917a-3b5b-bf10-e63a-381a5e0f3b4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.570388] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116197, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493854} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.570775] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] f6d3a50c-bcc3-4a6f-969f-4e629646f427/f6d3a50c-bcc3-4a6f-969f-4e629646f427.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 725.571026] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 725.571026] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93d1ed93-7465-4923-bd38-7391eb11ff64 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.577481] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 725.577481] env[62109]: value = "task-1116199" [ 725.577481] env[62109]: _type = "Task" [ 725.577481] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.586473] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116199, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.621304] env[62109]: DEBUG nova.compute.utils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 725.622483] env[62109]: DEBUG nova.compute.manager [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 725.622650] env[62109]: DEBUG nova.network.neutron [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 725.645615] env[62109]: DEBUG oslo_vmware.api [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Task: {'id': task-1116198, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.223417} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.645880] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 725.646075] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 725.646264] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 725.646433] env[62109]: INFO nova.compute.manager [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Took 1.14 seconds to destroy the instance on the hypervisor. 
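
The teardown recorded above follows the VMware driver's usual asynchronous pattern: each vCenter call that returns a Task object (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) is issued through the shared VMwareAPISession and then polled by oslo.vmware's wait_for_task, which is what produces the recurring "Waiting for the task", "progress is 0%" and "completed successfully" entries. A minimal standalone sketch of that invoke-then-poll pattern is below; the vCenter address, credentials and the 'vm-12345' managed object id are placeholders, not values taken from this log.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder connection details; Nova reads the real ones from the [vmware]
# section of nova.conf when it creates the driver's session.
session = vmware_api.VMwareAPISession(
    'vc.example.org', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed object reference for the VM (placeholder moref value) and
# start the asynchronous power-off task on vCenter.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# wait_for_task() polls the task at task_poll_interval (the source of the
# "_poll_task ... progress is N%" debug lines) and raises if vCenter reports
# the task as failed.
session.wait_for_task(task)

Nova's vm_util.power_off_instance (logged above at vm_util.py:1502) wraps essentially this pair of calls through the driver's session helpers, which is why each PowerOffVM_Task invocation in the log is immediately followed by wait_for_task/_poll_task entries from the same request.
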
[ 725.646671] env[62109]: DEBUG oslo.service.loopingcall [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.646869] env[62109]: DEBUG nova.compute.manager [-] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 725.646963] env[62109]: DEBUG nova.network.neutron [-] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 725.707447] env[62109]: DEBUG nova.policy [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c49484657292430db0569311e6a5cc46', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45ad1c039aa9463e977cf986ce4dccf4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 725.870420] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.955085] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Acquiring lock "refresh_cache-8d9a7696-0465-4895-9ce8-4b4b8b2ca59e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.955295] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Acquired lock "refresh_cache-8d9a7696-0465-4895-9ce8-4b4b8b2ca59e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.955460] env[62109]: DEBUG nova.network.neutron [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 726.041510] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f6917a-3b5b-bf10-e63a-381a5e0f3b4b, 'name': SearchDatastore_Task, 'duration_secs': 0.010149} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.047847] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.048197] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 726.048485] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.048752] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.048988] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.050676] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85533c93-f0ef-4fc4-8c2d-c18b836088fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.061170] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.062267] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 726.064408] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70065c41-8561-48aa-9862-53b7fd4af958 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.072101] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 726.072101] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52668979-55b8-eef8-d38c-383e20d1600d" [ 726.072101] env[62109]: _type = "Task" [ 726.072101] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.093488] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52668979-55b8-eef8-d38c-383e20d1600d, 'name': SearchDatastore_Task, 'duration_secs': 0.009394} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.097043] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116199, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.268186} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.097249] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cade67e8-3ce1-4968-9d1b-19f57d1e33f0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.099886] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 726.101730] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc039b8-3a61-4a00-968e-681ff1224336 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.107724] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 726.107724] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529b5e82-b0cb-c718-62df-fa6d396b7690" [ 726.107724] env[62109]: _type = "Task" [ 726.107724] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.129671] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] f6d3a50c-bcc3-4a6f-969f-4e629646f427/f6d3a50c-bcc3-4a6f-969f-4e629646f427.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 726.130714] env[62109]: DEBUG nova.network.neutron [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Successfully created port: 23e271d5-4d3a-4ad9-934f-4123916de8c1 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 726.135716] env[62109]: DEBUG nova.compute.manager [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 726.143208] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c945bf87-c343-42dd-a245-c3a797268dbd tempest-ServersAdmin275Test-193789471 tempest-ServersAdmin275Test-193789471-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.027s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.144602] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b319ce3-370d-4046-a0b1-efca6d1aaa1b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.162989] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.053s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.163330] env[62109]: DEBUG nova.objects.instance [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Lazy-loading 'resources' on Instance uuid 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 726.172784] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529b5e82-b0cb-c718-62df-fa6d396b7690, 'name': SearchDatastore_Task, 'duration_secs': 0.010993} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.174776] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.175094] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] d727d597-c4ac-426e-bdc3-fc4f73a3eac9/d727d597-c4ac-426e-bdc3-fc4f73a3eac9.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 726.175401] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 726.175401] env[62109]: value = "task-1116200" [ 726.175401] env[62109]: _type = "Task" [ 726.175401] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.176177] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d488c5a-9bb7-48a5-a5e0-c3ac27baa817 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.188459] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116200, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.190225] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 726.190225] env[62109]: value = "task-1116201" [ 726.190225] env[62109]: _type = "Task" [ 726.190225] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.403493] env[62109]: DEBUG nova.network.neutron [-] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.502143] env[62109]: DEBUG nova.network.neutron [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 726.554339] env[62109]: DEBUG nova.network.neutron [-] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.658111] env[62109]: DEBUG nova.network.neutron [req-ccde68ed-bbc3-40fb-adb0-77521de551cc req-404dd246-fb26-4d5d-a113-67cba954b8dd service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Updated VIF entry in instance network info cache for port a43481f3-cacf-4bd2-9e74-4ca60b37da80. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 726.659668] env[62109]: DEBUG nova.network.neutron [req-ccde68ed-bbc3-40fb-adb0-77521de551cc req-404dd246-fb26-4d5d-a113-67cba954b8dd service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Updating instance_info_cache with network_info: [{"id": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "address": "fa:16:3e:5c:b7:79", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa43481f3-ca", "ovs_interfaceid": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.664683] env[62109]: DEBUG nova.network.neutron [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Updating instance_info_cache with network_info: [{"id": "4fbac6b8-6bfd-4785-aed0-7407c636e189", "address": "fa:16:3e:fd:2b:01", "network": {"id": "e338ba02-7aeb-4206-a9e7-a63d8b692ad1", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-843836463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47687318ee304ac0a32de02f47070193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap4fbac6b8-6b", "ovs_interfaceid": "4fbac6b8-6bfd-4785-aed0-7407c636e189", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.691023] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116200, 'name': ReconfigVM_Task, 'duration_secs': 0.497023} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.691023] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Reconfigured VM instance instance-00000023 to attach disk [datastore2] f6d3a50c-bcc3-4a6f-969f-4e629646f427/f6d3a50c-bcc3-4a6f-969f-4e629646f427.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 726.691023] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e7b464b-db87-45c7-b399-13d63b68fac2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.705279] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116201, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496256} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.706496] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] d727d597-c4ac-426e-bdc3-fc4f73a3eac9/d727d597-c4ac-426e-bdc3-fc4f73a3eac9.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 726.706770] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 726.707266] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 726.707266] env[62109]: value = "task-1116202" [ 726.707266] env[62109]: _type = "Task" [ 726.707266] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.709575] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d56dc4fd-4731-472e-a24c-6873f53a80e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.718372] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 726.718372] env[62109]: value = "task-1116203" [ 726.718372] env[62109]: _type = "Task" [ 726.718372] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.721694] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116202, 'name': Rename_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.733571] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116203, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.905914] env[62109]: INFO nova.compute.manager [-] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Took 1.39 seconds to deallocate network for instance. [ 727.060525] env[62109]: INFO nova.compute.manager [-] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Took 1.41 seconds to deallocate network for instance. 
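
The records above repeat one pattern: a handler invokes a vCenter task (SearchDatastore_Task, ExtendVirtualDisk_Task, CopyVirtualDisk_Task, ReconfigVM_Task, Rename_Task), oslo_vmware.api logs "Waiting for the task ... to complete", intermediate "progress is N%" polls follow, and the task ends with "completed successfully" plus a duration_secs. Below is a minimal, self-contained sketch of that poll-until-done shape; the names used here (TaskInfo, poll_task_until_done, fetch_task_info) are illustrative assumptions, not the actual oslo.vmware API.

```python
# Illustrative sketch only: a poll-until-done loop in the spirit of the
# "Waiting for the task", "progress is N%" and "completed successfully"
# records above. TaskInfo / poll_task_until_done / fetch_task_info are
# hypothetical names, not the real oslo.vmware interface.
import time
from dataclasses import dataclass
from typing import Callable, Optional


@dataclass
class TaskInfo:
    task_id: str
    state: str            # "queued" | "running" | "success" | "error"
    progress: int = 0     # percent complete while running
    error: Optional[str] = None


def poll_task_until_done(fetch_task_info: Callable[[str], TaskInfo],
                         task_id: str,
                         interval: float = 0.5) -> TaskInfo:
    """Block until the server-side task finishes, reporting progress."""
    while True:
        info = fetch_task_info(task_id)
        if info.state == "success":
            print(f"Task {info.task_id} completed successfully")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {info.task_id} failed: {info.error}")
        print(f"Task {info.task_id} progress is {info.progress}%")
        time.sleep(interval)
```

The duration_secs that appears in the completion records would, in this sketch, simply be the wall-clock time spent inside the loop.
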
[ 727.063713] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3dca81e-1b96-4d49-8a81-6f8ae28272db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.080278] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdee419-604c-4865-b3a9-8a43955aa5a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.112872] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46e0e35-f8d0-4de1-bcea-600d6137be1c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.121117] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d91c51-af96-4e9c-a7e3-fbe30b79a1b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.135344] env[62109]: DEBUG nova.compute.provider_tree [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.151093] env[62109]: DEBUG nova.compute.manager [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 727.171187] env[62109]: DEBUG oslo_concurrency.lockutils [req-ccde68ed-bbc3-40fb-adb0-77521de551cc req-404dd246-fb26-4d5d-a113-67cba954b8dd service nova] Releasing lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.171675] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Releasing lock "refresh_cache-8d9a7696-0465-4895-9ce8-4b4b8b2ca59e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.171950] env[62109]: DEBUG nova.compute.manager [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Instance network_info: |[{"id": "4fbac6b8-6bfd-4785-aed0-7407c636e189", "address": "fa:16:3e:fd:2b:01", "network": {"id": "e338ba02-7aeb-4206-a9e7-a63d8b692ad1", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-843836463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47687318ee304ac0a32de02f47070193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fbac6b8-6b", "ovs_interfaceid": "4fbac6b8-6bfd-4785-aed0-7407c636e189", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 727.173634] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:2b:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae4e3171-21cd-4094-b6cf-81bf366c75bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4fbac6b8-6bfd-4785-aed0-7407c636e189', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 727.179814] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Creating folder: Project (47687318ee304ac0a32de02f47070193). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 727.181947] env[62109]: DEBUG nova.virt.hardware [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 727.182198] env[62109]: DEBUG nova.virt.hardware [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 727.182355] env[62109]: DEBUG nova.virt.hardware [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 727.182542] env[62109]: DEBUG nova.virt.hardware [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 727.182685] 
env[62109]: DEBUG nova.virt.hardware [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 727.182828] env[62109]: DEBUG nova.virt.hardware [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 727.183045] env[62109]: DEBUG nova.virt.hardware [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 727.183209] env[62109]: DEBUG nova.virt.hardware [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 727.183372] env[62109]: DEBUG nova.virt.hardware [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 727.183528] env[62109]: DEBUG nova.virt.hardware [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 727.183695] env[62109]: DEBUG nova.virt.hardware [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 727.184215] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6a296bd-d99e-4c17-9735-f52a2992c0b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.186378] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950ee46d-69a0-4dea-b07c-48c6f6825e4c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.194273] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c835e814-f5b8-4dba-b212-0d905668d016 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.198713] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Created folder: Project (47687318ee304ac0a32de02f47070193) in parent group-v244329. 
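
The nova.virt.hardware records above walk from "Build topologies for 1 vcpu(s) 1:1:1" to "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]". The following rough sketch shows that enumeration under simplified assumptions (per-dimension limits only, preference sorting omitted); it is not nova.virt.hardware itself, but it makes clear why one vCPU with 65536-wide limits yields exactly one topology.

```python
# Illustrative sketch: enumerate (sockets, cores, threads) combinations
# whose product equals the flavor's vCPU count and that fit within the
# per-dimension limits, as the "Got 1 possible topologies" records trace.
from itertools import product


def possible_topologies(vcpus: int, max_sockets: int, max_cores: int,
                        max_threads: int) -> list[tuple[int, int, int]]:
    """Return every (sockets, cores, threads) factorisation of vcpus
    that stays within the given limits."""
    topologies = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if (sockets * cores * threads == vcpus
                and sockets <= max_sockets
                and cores <= max_cores
                and threads <= max_threads):
            topologies.append((sockets, cores, threads))
    return topologies


# With one vCPU and the default 65536 limits seen above, the only
# factorisation is 1:1:1, so that topology is the one selected.
print(possible_topologies(1, 65536, 65536, 65536))  # [(1, 1, 1)]
```
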
[ 727.198889] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Creating folder: Instances. Parent ref: group-v244360. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 727.199375] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b064b9b-4d3b-46bb-a466-d99454791f96 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.209196] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Created folder: Instances in parent group-v244360. [ 727.209406] env[62109]: DEBUG oslo.service.loopingcall [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 727.209588] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 727.209767] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e8e2c17-51a9-435c-9595-3c45e64493f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.234706] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116202, 'name': Rename_Task, 'duration_secs': 0.486236} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.237231] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 727.237440] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 727.237440] env[62109]: value = "task-1116206" [ 727.237440] env[62109]: _type = "Task" [ 727.237440] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.237667] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ecfbad4-18df-494b-b91c-8a8431814133 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.248309] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116203, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06527} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.251510] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 727.251860] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 727.251860] env[62109]: value = "task-1116207" [ 727.251860] env[62109]: _type = "Task" [ 727.251860] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.251991] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116206, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.253560] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda06790-84e1-44f4-872c-cf509eed2b0a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.256890] env[62109]: DEBUG nova.compute.manager [req-b27b047d-6994-480f-8c75-cbe87441e189 req-44010117-acc8-462b-b3ee-9dc7b3e70aa7 service nova] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Received event network-changed-4fbac6b8-6bfd-4785-aed0-7407c636e189 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 727.257080] env[62109]: DEBUG nova.compute.manager [req-b27b047d-6994-480f-8c75-cbe87441e189 req-44010117-acc8-462b-b3ee-9dc7b3e70aa7 service nova] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Refreshing instance network info cache due to event network-changed-4fbac6b8-6bfd-4785-aed0-7407c636e189. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 727.257289] env[62109]: DEBUG oslo_concurrency.lockutils [req-b27b047d-6994-480f-8c75-cbe87441e189 req-44010117-acc8-462b-b3ee-9dc7b3e70aa7 service nova] Acquiring lock "refresh_cache-8d9a7696-0465-4895-9ce8-4b4b8b2ca59e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.257429] env[62109]: DEBUG oslo_concurrency.lockutils [req-b27b047d-6994-480f-8c75-cbe87441e189 req-44010117-acc8-462b-b3ee-9dc7b3e70aa7 service nova] Acquired lock "refresh_cache-8d9a7696-0465-4895-9ce8-4b4b8b2ca59e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.257583] env[62109]: DEBUG nova.network.neutron [req-b27b047d-6994-480f-8c75-cbe87441e189 req-44010117-acc8-462b-b3ee-9dc7b3e70aa7 service nova] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Refreshing network info cache for port 4fbac6b8-6bfd-4785-aed0-7407c636e189 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 727.281689] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] d727d597-c4ac-426e-bdc3-fc4f73a3eac9/d727d597-c4ac-426e-bdc3-fc4f73a3eac9.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 727.285428] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8da6246-c2db-49fd-961c-641388f75f2e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.300556] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116207, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.306330] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 727.306330] env[62109]: value = "task-1116208" [ 727.306330] env[62109]: _type = "Task" [ 727.306330] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.314980] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116208, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.414629] env[62109]: DEBUG oslo_concurrency.lockutils [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.567373] env[62109]: DEBUG nova.compute.manager [req-47db533f-e694-4f94-9fee-a23be3c022fa req-28865b71-ba40-4eb3-8959-b810b38ff718 service nova] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Received event network-vif-deleted-5ddcdbcd-b248-4185-acdb-ef37c8631a5c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 727.574967] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.638640] env[62109]: DEBUG nova.scheduler.client.report [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 727.754688] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116206, 'name': CreateVM_Task} progress is 25%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.767905] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116207, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.818128] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116208, 'name': ReconfigVM_Task, 'duration_secs': 0.280844} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.818440] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Reconfigured VM instance instance-00000021 to attach disk [datastore2] d727d597-c4ac-426e-bdc3-fc4f73a3eac9/d727d597-c4ac-426e-bdc3-fc4f73a3eac9.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 727.821694] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06877a8b-3dc9-4eda-878e-6fc2ef6b9cc1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.829572] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 727.829572] env[62109]: value = "task-1116209" [ 727.829572] env[62109]: _type = "Task" [ 727.829572] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.836846] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116209, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.027708] env[62109]: DEBUG nova.network.neutron [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Successfully updated port: 23e271d5-4d3a-4ad9-934f-4123916de8c1 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 728.083191] env[62109]: DEBUG nova.network.neutron [req-b27b047d-6994-480f-8c75-cbe87441e189 req-44010117-acc8-462b-b3ee-9dc7b3e70aa7 service nova] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Updated VIF entry in instance network info cache for port 4fbac6b8-6bfd-4785-aed0-7407c636e189. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 728.083660] env[62109]: DEBUG nova.network.neutron [req-b27b047d-6994-480f-8c75-cbe87441e189 req-44010117-acc8-462b-b3ee-9dc7b3e70aa7 service nova] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Updating instance_info_cache with network_info: [{"id": "4fbac6b8-6bfd-4785-aed0-7407c636e189", "address": "fa:16:3e:fd:2b:01", "network": {"id": "e338ba02-7aeb-4206-a9e7-a63d8b692ad1", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-843836463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47687318ee304ac0a32de02f47070193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fbac6b8-6b", "ovs_interfaceid": "4fbac6b8-6bfd-4785-aed0-7407c636e189", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.143714] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.981s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.146277] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.386s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.147791] env[62109]: INFO nova.compute.claims [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 728.164714] env[62109]: INFO nova.scheduler.client.report [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Deleted allocations for instance 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1 [ 728.251353] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116206, 'name': CreateVM_Task, 'duration_secs': 0.670121} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.251522] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 728.252190] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.252349] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.252663] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 728.253191] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54c4e42a-d219-4064-a59f-bb7f08a23ce8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.260338] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Waiting for the task: (returnval){ [ 728.260338] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5208a292-6587-da87-9ab7-d11105d7d120" [ 728.260338] env[62109]: _type = "Task" [ 728.260338] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.266354] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116207, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.271300] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5208a292-6587-da87-9ab7-d11105d7d120, 'name': SearchDatastore_Task, 'duration_secs': 0.008861} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.271550] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.271772] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 728.271988] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.272144] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.272320] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 728.272545] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16bb8cfc-a85e-460f-af54-6cd6bd6616ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.279406] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 728.279577] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 728.280235] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57ae38e2-b1c2-41ea-a016-ccd0bbfe1e06 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.284933] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Waiting for the task: (returnval){ [ 728.284933] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520fbf33-77fb-19df-e2e3-eebedf399927" [ 728.284933] env[62109]: _type = "Task" [ 728.284933] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.292325] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520fbf33-77fb-19df-e2e3-eebedf399927, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.338627] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116209, 'name': Rename_Task, 'duration_secs': 0.208957} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.338911] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 728.339168] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e76c9e3a-04f1-4a14-8e7c-abc536c37896 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.346013] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 728.346013] env[62109]: value = "task-1116210" [ 728.346013] env[62109]: _type = "Task" [ 728.346013] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.353599] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116210, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.531011] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "refresh_cache-c753a2db-d701-4508-88bd-4ebe4f32a075" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.531191] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquired lock "refresh_cache-c753a2db-d701-4508-88bd-4ebe4f32a075" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.531347] env[62109]: DEBUG nova.network.neutron [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 728.586062] env[62109]: DEBUG oslo_concurrency.lockutils [req-b27b047d-6994-480f-8c75-cbe87441e189 req-44010117-acc8-462b-b3ee-9dc7b3e70aa7 service nova] Releasing lock "refresh_cache-8d9a7696-0465-4895-9ce8-4b4b8b2ca59e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.586207] env[62109]: DEBUG nova.compute.manager [req-b27b047d-6994-480f-8c75-cbe87441e189 req-44010117-acc8-462b-b3ee-9dc7b3e70aa7 service nova] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Received event network-vif-deleted-6f0fba83-886e-4b4d-80b4-14d23a8d48f3 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 728.672038] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6be925e9-0d74-47d6-8725-4dc04239cd68 tempest-ServersAdmin275Test-273144627 tempest-ServersAdmin275Test-273144627-project-member] Lock "934e3a1b-8d3f-4de0-ae8b-35b82d3859a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.285s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.767066] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116207, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.794094] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520fbf33-77fb-19df-e2e3-eebedf399927, 'name': SearchDatastore_Task, 'duration_secs': 0.008695} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.794873] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b118f79-89fa-48ea-bdf4-53984ff140d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.799973] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Waiting for the task: (returnval){ [ 728.799973] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524a21f8-7425-271c-2ffb-be2fbda26dc4" [ 728.799973] env[62109]: _type = "Task" [ 728.799973] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.807585] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524a21f8-7425-271c-2ffb-be2fbda26dc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.855864] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116210, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.077629] env[62109]: DEBUG nova.network.neutron [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 729.249313] env[62109]: DEBUG nova.network.neutron [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Updating instance_info_cache with network_info: [{"id": "23e271d5-4d3a-4ad9-934f-4123916de8c1", "address": "fa:16:3e:b0:2a:e9", "network": {"id": "41fbc2a0-d530-4b40-ba33-39d1ea6d6046", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1834365577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "45ad1c039aa9463e977cf986ce4dccf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e271d5-4d", "ovs_interfaceid": "23e271d5-4d3a-4ad9-934f-4123916de8c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.275070] env[62109]: DEBUG oslo_vmware.api [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116207, 'name': PowerOnVM_Task, 'duration_secs': 1.754047} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.275839] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 729.275839] env[62109]: INFO nova.compute.manager [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Took 9.24 seconds to spawn the instance on the hypervisor. 
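
Taken together, the task sequences above (for d727d597-c4ac-..., f6d3a50c-bcc3-... and 8d9a7696-0465-...) trace the spawn path from a cached image: copy the cached VMDK out of devstack-image-cache_base, extend the root disk, attach it via a VM reconfigure, rename the VM, and power it on, ending with the "Took N seconds to spawn the instance on the hypervisor" summary. A hypothetical sketch of that ordering follows; the step runner and its callback are stand-ins, not the real nova.virt.vmwareapi code.

```python
# Illustrative sketch only: the order of vCenter tasks these records trace
# while spawning an instance from a cached image. Step names mirror the
# task names in the log; run_spawn_steps and its executor callback are
# hypothetical stand-ins for the real driver calls.
from typing import Callable

SPAWN_STEPS: list[tuple[str, str]] = [
    ("CopyVirtualDisk_Task",   "copy the cached VMDK from devstack-image-cache_base to the instance folder"),
    ("ExtendVirtualDisk_Task", "grow the copied root disk to the flavor's root_gb"),
    ("ReconfigVM_Task",        "attach the root disk to the VM"),
    ("Rename_Task",            "rename the VM to its instance UUID"),
    ("PowerOnVM_Task",         "power the VM on"),
]


def run_spawn_steps(execute: Callable[[str], float]) -> float:
    """Run each step via the supplied executor (which returns its duration
    in seconds) and report the total, as the final 'Took N seconds to spawn
    the instance on the hypervisor' record does."""
    total = 0.0
    for task_name, description in SPAWN_STEPS:
        duration = execute(task_name)
        print(f"Task {task_name} ({description}) completed in {duration:.3f}s")
        total += duration
    print(f"Took {total:.2f} seconds to spawn the instance on the hypervisor.")
    return total


# Example with a dummy executor standing in for real vCenter calls:
run_spawn_steps(lambda task_name: 0.5)
```
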
[ 729.276019] env[62109]: DEBUG nova.compute.manager [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 729.276912] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb20e79c-7b1b-470a-96d8-a384f1ec9830 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.320685] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524a21f8-7425-271c-2ffb-be2fbda26dc4, 'name': SearchDatastore_Task, 'duration_secs': 0.009204} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.321046] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.321604] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e/8d9a7696-0465-4895-9ce8-4b4b8b2ca59e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 729.321604] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c2f970c-653c-4ce3-9b6a-248437ec98ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.329227] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Waiting for the task: (returnval){ [ 729.329227] env[62109]: value = "task-1116211" [ 729.329227] env[62109]: _type = "Task" [ 729.329227] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.339474] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116211, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.354913] env[62109]: DEBUG oslo_vmware.api [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116210, 'name': PowerOnVM_Task, 'duration_secs': 0.628044} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.357673] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 729.357904] env[62109]: DEBUG nova.compute.manager [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 729.358892] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c9cd3c-0be7-47e0-9823-3e9d182e06f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.591205] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67219564-1424-4d71-8a73-9ac9762202a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.600687] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0168d8bb-4a9d-45c2-9898-ffc205d55a4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.635856] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df5fa3a-999e-459a-a212-c0d536f03a00 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.647958] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c719e4-fdee-4faf-92a5-90e5543753ea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.664937] env[62109]: DEBUG nova.compute.provider_tree [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.721044] env[62109]: DEBUG nova.compute.manager [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Received event network-vif-plugged-23e271d5-4d3a-4ad9-934f-4123916de8c1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 729.721269] env[62109]: DEBUG oslo_concurrency.lockutils [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] Acquiring lock "c753a2db-d701-4508-88bd-4ebe4f32a075-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.721795] env[62109]: DEBUG oslo_concurrency.lockutils [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] Lock "c753a2db-d701-4508-88bd-4ebe4f32a075-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.722041] env[62109]: DEBUG oslo_concurrency.lockutils [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] Lock "c753a2db-d701-4508-88bd-4ebe4f32a075-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.723216] env[62109]: DEBUG nova.compute.manager [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] No waiting events found dispatching network-vif-plugged-23e271d5-4d3a-4ad9-934f-4123916de8c1 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 729.723216] env[62109]: WARNING nova.compute.manager [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Received unexpected event network-vif-plugged-23e271d5-4d3a-4ad9-934f-4123916de8c1 for instance with vm_state building and task_state spawning. [ 729.723216] env[62109]: DEBUG nova.compute.manager [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Received event network-changed-23e271d5-4d3a-4ad9-934f-4123916de8c1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 729.723216] env[62109]: DEBUG nova.compute.manager [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Refreshing instance network info cache due to event network-changed-23e271d5-4d3a-4ad9-934f-4123916de8c1. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 729.723216] env[62109]: DEBUG oslo_concurrency.lockutils [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] Acquiring lock "refresh_cache-c753a2db-d701-4508-88bd-4ebe4f32a075" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.755448] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Releasing lock "refresh_cache-c753a2db-d701-4508-88bd-4ebe4f32a075" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.755831] env[62109]: DEBUG nova.compute.manager [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Instance network_info: |[{"id": "23e271d5-4d3a-4ad9-934f-4123916de8c1", "address": "fa:16:3e:b0:2a:e9", "network": {"id": "41fbc2a0-d530-4b40-ba33-39d1ea6d6046", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1834365577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "45ad1c039aa9463e977cf986ce4dccf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e271d5-4d", "ovs_interfaceid": "23e271d5-4d3a-4ad9-934f-4123916de8c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 729.756613] env[62109]: DEBUG oslo_concurrency.lockutils [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] Acquired lock "refresh_cache-c753a2db-d701-4508-88bd-4ebe4f32a075" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.756822] env[62109]: DEBUG nova.network.neutron [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Refreshing network info cache for port 23e271d5-4d3a-4ad9-934f-4123916de8c1 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 729.758160] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:2a:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1ce8361b-fd8e-4971-a37f-b84a4f77db19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23e271d5-4d3a-4ad9-934f-4123916de8c1', 'vif_model': 'vmxnet3'}] 
{{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 729.766735] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Creating folder: Project (45ad1c039aa9463e977cf986ce4dccf4). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 729.767290] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38ba5421-e297-4e53-be92-25f488bbca15 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.782024] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Created folder: Project (45ad1c039aa9463e977cf986ce4dccf4) in parent group-v244329. [ 729.782024] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Creating folder: Instances. Parent ref: group-v244363. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 729.782024] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d895a636-36cb-4d1e-9852-128ffa6dc42b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.794188] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Created folder: Instances in parent group-v244363. [ 729.794431] env[62109]: DEBUG oslo.service.loopingcall [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 729.794634] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 729.794841] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-118bae6b-487a-4695-8faa-7c2a59ec99b0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.813279] env[62109]: INFO nova.compute.manager [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Took 47.60 seconds to build instance. [ 729.817215] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 729.817215] env[62109]: value = "task-1116214" [ 729.817215] env[62109]: _type = "Task" [ 729.817215] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.825351] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116214, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.840026] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116211, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446772} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.840312] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e/8d9a7696-0465-4895-9ce8-4b4b8b2ca59e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 729.840525] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 729.840774] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7c98125-ae96-49cb-a03c-38a0db97e136 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.848424] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Waiting for the task: (returnval){ [ 729.848424] env[62109]: value = "task-1116215" [ 729.848424] env[62109]: _type = "Task" [ 729.848424] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.856585] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116215, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.878610] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.170562] env[62109]: DEBUG nova.scheduler.client.report [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 730.319021] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d0e07abe-3320-4e7a-8996-a9a75d33322f tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lock "f6d3a50c-bcc3-4a6f-969f-4e629646f427" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 131.958s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.329552] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116214, 'name': CreateVM_Task, 'duration_secs': 0.462213} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.329726] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 730.332375] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.332375] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.332375] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 730.332375] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d24f0698-b9c5-4739-ab08-d69c3ec063a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.340042] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 730.340042] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5295e3e6-d00e-2e59-84e8-b03e0f95041c" [ 730.340042] env[62109]: _type = "Task" [ 730.340042] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.349129] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5295e3e6-d00e-2e59-84e8-b03e0f95041c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.367798] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116215, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064157} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.368953] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 730.369485] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb05a57-0a8e-4a6b-9a90-249129de298f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.398640] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e/8d9a7696-0465-4895-9ce8-4b4b8b2ca59e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 730.399887] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-197d686e-630b-4ad1-a1b1-3a610f53d707 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.425148] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Waiting for the task: (returnval){ [ 730.425148] env[62109]: value = "task-1116216" [ 730.425148] env[62109]: _type = "Task" [ 730.425148] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.439201] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116216, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.654735] env[62109]: DEBUG nova.network.neutron [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Updated VIF entry in instance network info cache for port 23e271d5-4d3a-4ad9-934f-4123916de8c1. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 730.655162] env[62109]: DEBUG nova.network.neutron [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Updating instance_info_cache with network_info: [{"id": "23e271d5-4d3a-4ad9-934f-4123916de8c1", "address": "fa:16:3e:b0:2a:e9", "network": {"id": "41fbc2a0-d530-4b40-ba33-39d1ea6d6046", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1834365577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "45ad1c039aa9463e977cf986ce4dccf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e271d5-4d", "ovs_interfaceid": "23e271d5-4d3a-4ad9-934f-4123916de8c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.674796] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.529s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.675365] env[62109]: DEBUG nova.compute.manager [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 730.678614] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.158s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.679956] env[62109]: INFO nova.compute.claims [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 730.819869] env[62109]: DEBUG nova.compute.manager [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 730.850970] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5295e3e6-d00e-2e59-84e8-b03e0f95041c, 'name': SearchDatastore_Task, 'duration_secs': 0.010225} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.851306] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.851547] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 730.851837] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.852032] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.852278] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 730.852586] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a66cbfc1-d6f4-4902-848c-5a4305867c27 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.861315] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 730.861487] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 730.862815] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5465007-b215-4328-b4c4-2969a0b125e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.868704] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 730.868704] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526ae69f-b10e-720c-a152-959b07d4f075" [ 730.868704] env[62109]: _type = "Task" [ 730.868704] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.869040] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "d727d597-c4ac-426e-bdc3-fc4f73a3eac9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.869233] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "d727d597-c4ac-426e-bdc3-fc4f73a3eac9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.869688] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "d727d597-c4ac-426e-bdc3-fc4f73a3eac9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.869688] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "d727d597-c4ac-426e-bdc3-fc4f73a3eac9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.869845] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "d727d597-c4ac-426e-bdc3-fc4f73a3eac9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.875203] env[62109]: INFO nova.compute.manager [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Terminating instance [ 730.877290] env[62109]: DEBUG nova.compute.manager [None 
req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 730.877487] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 730.878716] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c765e57b-1495-4e64-96d4-1ad19a6fb55d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.885710] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526ae69f-b10e-720c-a152-959b07d4f075, 'name': SearchDatastore_Task, 'duration_secs': 0.008372} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.886833] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-471952bb-ea0c-40b9-b463-503cd30f3306 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.890288] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 730.891028] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8bc9543-6ec7-43eb-98ab-7e16deec0023 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.893250] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 730.893250] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52aee82c-44e6-dfa1-f23b-d3ffb05c3451" [ 730.893250] env[62109]: _type = "Task" [ 730.893250] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.901222] env[62109]: DEBUG oslo_vmware.api [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 730.901222] env[62109]: value = "task-1116217" [ 730.901222] env[62109]: _type = "Task" [ 730.901222] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.905874] env[62109]: DEBUG nova.compute.manager [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 730.906825] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52aee82c-44e6-dfa1-f23b-d3ffb05c3451, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.907638] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cde811-afd5-4c5b-bde5-b750fe872fea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.914656] env[62109]: DEBUG oslo_vmware.api [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116217, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.936014] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116216, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.157728] env[62109]: DEBUG oslo_concurrency.lockutils [req-6489f90a-a7ab-4a6d-9ce2-545b6b887336 req-3aaba5c8-9ea3-4c79-ba0b-87c571e9d778 service nova] Releasing lock "refresh_cache-c753a2db-d701-4508-88bd-4ebe4f32a075" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.184636] env[62109]: DEBUG nova.compute.utils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 731.187951] env[62109]: DEBUG nova.compute.manager [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 731.188130] env[62109]: DEBUG nova.network.neutron [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 731.253687] env[62109]: DEBUG nova.policy [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1da9d51bcc574df7aa69b59f2018d389', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7bff923ccb02449aa834523a0652cbdb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 731.356159] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.413258] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52aee82c-44e6-dfa1-f23b-d3ffb05c3451, 'name': SearchDatastore_Task, 'duration_secs': 0.00878} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.414446] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.414841] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] c753a2db-d701-4508-88bd-4ebe4f32a075/c753a2db-d701-4508-88bd-4ebe4f32a075.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 731.415582] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f19e2f21-ccd5-4e12-8f29-1fdb222e2a2b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.423819] env[62109]: INFO nova.compute.manager [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] instance snapshotting [ 731.427045] env[62109]: DEBUG oslo_vmware.api [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116217, 'name': PowerOffVM_Task, 'duration_secs': 0.212284} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.427691] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 731.428167] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 731.434397] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2372115d-d829-4cef-8cb7-0247d408e6ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.436059] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2039f05-b8a4-445c-acad-e45487c95313 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.441359] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 731.441359] env[62109]: value = "task-1116218" [ 731.441359] env[62109]: _type = "Task" [ 731.441359] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.452115] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116216, 'name': ReconfigVM_Task, 'duration_secs': 1.012586} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.453449] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Reconfigured VM instance instance-00000024 to attach disk [datastore2] 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e/8d9a7696-0465-4895-9ce8-4b4b8b2ca59e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 731.458642] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36859542-576b-4b66-bdd9-2a7949054f99 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.460617] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116218, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.482014] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "c694c178-3894-4997-8e99-8f4900a64848" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.482548] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "c694c178-3894-4997-8e99-8f4900a64848" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.483401] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f71aabd-485d-4bed-a584-6624fd0f7b69 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.488887] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Waiting for the task: (returnval){ [ 731.488887] env[62109]: value = "task-1116220" [ 731.488887] env[62109]: _type = "Task" [ 731.488887] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.505534] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116220, 'name': Rename_Task} progress is 10%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.507200] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 731.507447] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 731.509185] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleting the datastore file [datastore2] d727d597-c4ac-426e-bdc3-fc4f73a3eac9 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.509185] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bca534b2-5c55-4fcc-af8a-4dcdd99e3d3f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.514407] env[62109]: DEBUG oslo_vmware.api [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 731.514407] env[62109]: value = "task-1116221" [ 731.514407] env[62109]: _type = "Task" [ 731.514407] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.523509] env[62109]: DEBUG oslo_vmware.api [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116221, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.692025] env[62109]: DEBUG nova.compute.manager [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 731.811654] env[62109]: DEBUG nova.network.neutron [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Successfully created port: 76f15b7e-4103-4568-8042-248ee15513dc {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 731.969265] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116218, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.002054] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116220, 'name': Rename_Task, 'duration_secs': 0.180574} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.002361] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 732.002739] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a03d5b6-9dc5-4d5b-8fcc-41ad084f15ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.008622] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 732.010356] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d998196a-0e68-48db-8a8c-dc19b6aede46 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.015039] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Waiting for the task: (returnval){ [ 732.015039] env[62109]: value = "task-1116222" [ 732.015039] env[62109]: _type = "Task" [ 732.015039] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.020122] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 732.020122] env[62109]: value = "task-1116223" [ 732.020122] env[62109]: _type = "Task" [ 732.020122] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.040914] env[62109]: DEBUG oslo_vmware.api [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116221, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179001} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.049584] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 732.049826] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 732.050015] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 732.050203] env[62109]: INFO nova.compute.manager [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Took 1.17 seconds to destroy the instance on the hypervisor. [ 732.050473] env[62109]: DEBUG oslo.service.loopingcall [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 732.050716] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116222, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.053924] env[62109]: DEBUG nova.compute.manager [-] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 732.054036] env[62109]: DEBUG nova.network.neutron [-] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 732.055826] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116223, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.281357] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea453bac-429f-47cf-8a41-005e4b04babb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.293498] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8d0516-bb6c-4a6a-9ace-f455d76d48a7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.327498] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4075ce-6068-40b6-b2bb-0c9f68ec9a4c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.335598] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a2fd89-60fb-44c0-b62f-2c49e575ce92 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.350114] env[62109]: DEBUG nova.compute.provider_tree [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.456420] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116218, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541917} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.456420] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] c753a2db-d701-4508-88bd-4ebe4f32a075/c753a2db-d701-4508-88bd-4ebe4f32a075.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 732.456420] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 732.456420] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3bfcbd61-1806-4875-a79b-4b7ca0940ca6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.461830] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 732.461830] env[62109]: value = "task-1116224" [ 732.461830] env[62109]: _type = "Task" [ 732.461830] env[62109]: } to complete. 
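
The CopyVirtualDisk_Task / "Extending root virtual disk to 1048576" sequence above copies the cached image VMDK into the instance directory and then grows it to the flavor's root disk size; the logged target 1048576 is consistent with a 1 GiB root disk expressed in KB. A small sketch of that size arithmetic, with made-up helper names (not Nova's actual code):

    def root_disk_target_kb(flavor_root_gb):
        # 1 GiB root disk -> 1048576 KB, matching the extend target logged above.
        return flavor_root_gb * 1024 * 1024

    def needs_extend(image_virtual_size_bytes, flavor_root_gb):
        # Extend only when the flavor's root disk is larger than the copied image.
        return root_disk_target_kb(flavor_root_gb) * 1024 > image_virtual_size_bytes

    # Example matching the m1.nano flavor used in these requests (root_gb=1):
    assert root_disk_target_kb(1) == 1048576
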
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.472698] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116224, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.533870] env[62109]: DEBUG oslo_vmware.api [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116222, 'name': PowerOnVM_Task, 'duration_secs': 0.479711} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.534859] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 732.535162] env[62109]: INFO nova.compute.manager [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Took 7.95 seconds to spawn the instance on the hypervisor. [ 732.535344] env[62109]: DEBUG nova.compute.manager [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 732.536302] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27da7129-a5a0-4cc7-bc4a-93d17d50ee8c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.542538] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116223, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.655199] env[62109]: DEBUG nova.compute.manager [req-52ac4bde-bbf2-43dc-bc4d-456938a0ba1e req-787a5499-cb2b-4264-a156-e877c5439648 service nova] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Received event network-vif-deleted-05de4996-e885-4ff3-9685-950d1d793e43 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 732.655199] env[62109]: INFO nova.compute.manager [req-52ac4bde-bbf2-43dc-bc4d-456938a0ba1e req-787a5499-cb2b-4264-a156-e877c5439648 service nova] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Neutron deleted interface 05de4996-e885-4ff3-9685-950d1d793e43; detaching it from the instance and deleting it from the info cache [ 732.655199] env[62109]: DEBUG nova.network.neutron [req-52ac4bde-bbf2-43dc-bc4d-456938a0ba1e req-787a5499-cb2b-4264-a156-e877c5439648 service nova] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.705799] env[62109]: DEBUG nova.compute.manager [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 732.733048] env[62109]: DEBUG nova.virt.hardware [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 732.733356] env[62109]: DEBUG nova.virt.hardware [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 732.733526] env[62109]: DEBUG nova.virt.hardware [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 732.733708] env[62109]: DEBUG nova.virt.hardware [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 732.733867] env[62109]: DEBUG nova.virt.hardware [None 
req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 732.733995] env[62109]: DEBUG nova.virt.hardware [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 732.734212] env[62109]: DEBUG nova.virt.hardware [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 732.734371] env[62109]: DEBUG nova.virt.hardware [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 732.734537] env[62109]: DEBUG nova.virt.hardware [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 732.734699] env[62109]: DEBUG nova.virt.hardware [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 732.734870] env[62109]: DEBUG nova.virt.hardware [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 732.735791] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f43490-a640-44fe-8f67-f6ff94232475 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.744269] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4d8a2c-8dca-4628-8355-fd2a474eea25 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.853112] env[62109]: DEBUG nova.scheduler.client.report [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 732.971905] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116224, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083299} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.972186] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 732.973111] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e71507-58f2-4fce-aeee-9f59534f875f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.995017] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] c753a2db-d701-4508-88bd-4ebe4f32a075/c753a2db-d701-4508-88bd-4ebe4f32a075.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 732.995338] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a702aba2-2890-4ce4-abe2-06d965bc36dd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.015412] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 733.015412] env[62109]: value = "task-1116225" [ 733.015412] env[62109]: _type = "Task" [ 733.015412] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.023413] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116225, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.035927] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116223, 'name': CreateSnapshot_Task, 'duration_secs': 0.794407} completed successfully. 
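
The nova.virt.hardware walk-through a little earlier (flavor and image limits 0:0:0, maxima 65536, "Build topologies for 1 vcpu(s) 1:1:1", one possible topology) is essentially an enumeration of socket/core/thread factorisations of the vCPU count. A simplified sketch of that enumeration, not the real nova.virt.hardware implementation:

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate every (sockets, cores, threads) combination whose product is
        # the vCPU count and which stays within the per-dimension maxima.
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(VirtCPUTopology(s, c, t))
        return found

    # A 1-vCPU flavor such as m1.nano admits exactly one topology: 1 socket, 1 core, 1 thread.
    assert possible_topologies(1) == [VirtCPUTopology(1, 1, 1)]
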
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.036335] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 733.037061] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ea3664-f1b4-4c0a-8b8e-5bbd8f7f03d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.058534] env[62109]: INFO nova.compute.manager [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Took 46.21 seconds to build instance. [ 733.101744] env[62109]: DEBUG nova.network.neutron [-] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.159599] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56599892-2d76-4c26-ba33-8c453ecad540 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.170981] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33ee870-4bb4-4e43-aeaf-285a1948e43a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.201894] env[62109]: DEBUG nova.compute.manager [req-52ac4bde-bbf2-43dc-bc4d-456938a0ba1e req-787a5499-cb2b-4264-a156-e877c5439648 service nova] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Detach interface failed, port_id=05de4996-e885-4ff3-9685-950d1d793e43, reason: Instance d727d597-c4ac-426e-bdc3-fc4f73a3eac9 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 733.359655] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.681s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.360130] env[62109]: DEBUG nova.compute.manager [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Start building networks asynchronously for instance. 
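
The scheduler report-client entries above ("Inventory has not changed for provider ... based on inventory data: {...}") skip the Placement update whenever the freshly computed inventory matches what the provider tree already caches. A minimal sketch of that compare-before-update idea, with push_to_placement as a hypothetical stand-in for the real Placement client:

    def sync_inventory(cached, fresh, push_to_placement):
        # cached/fresh are dicts keyed by resource class, e.g.
        # {'VCPU': {'total': 48, 'allocation_ratio': 4.0, ...}, 'MEMORY_MB': {...}, ...}.
        if cached == fresh:
            # Corresponds to the "Inventory has not changed ..." lines: no API call made.
            return False
        push_to_placement(fresh)
        return True
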
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 733.362946] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.131s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.364849] env[62109]: INFO nova.compute.claims [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 733.526033] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116225, 'name': ReconfigVM_Task, 'duration_secs': 0.2761} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.526033] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Reconfigured VM instance instance-00000025 to attach disk [datastore1] c753a2db-d701-4508-88bd-4ebe4f32a075/c753a2db-d701-4508-88bd-4ebe4f32a075.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 733.526708] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b6bda92-0a72-49d9-ac13-5165b50d3bc3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.532917] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 733.532917] env[62109]: value = "task-1116226" [ 733.532917] env[62109]: _type = "Task" [ 733.532917] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.541618] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116226, 'name': Rename_Task} progress is 5%. 
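
The "Claim successful on node domain-c8..." entries above come from the resource tracker checking, under the compute_resources lock, that the requested flavor still fits before the build proceeds. A toy version of that check, ignoring allocation ratios and existing usage:

    def claim(free, request):
        # Succeed only if every requested resource fits into what is still free,
        # then deduct it from the free pool.
        if any(request.get(rc, 0) > free.get(rc, 0) for rc in request):
            return False
        for rc, amount in request.items():
            free[rc] -= amount
        return True

    # Totals minus reserved, taken from the inventory data logged above.
    free = {'VCPU': 48, 'MEMORY_MB': 196590 - 512, 'DISK_GB': 400}
    assert claim(free, {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1})   # m1.nano fits
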
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.556317] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 733.556317] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8572b030-0e62-4c00-beb6-e0c146bf8fe8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.560299] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdbe51f9-c6b6-41af-bf9a-7b6d5b2e133d tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Lock "8d9a7696-0465-4895-9ce8-4b4b8b2ca59e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 134.284s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.568698] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 733.568698] env[62109]: value = "task-1116227" [ 733.568698] env[62109]: _type = "Task" [ 733.568698] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.573069] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116227, 'name': CloneVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.602308] env[62109]: INFO nova.compute.manager [-] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Took 1.55 seconds to deallocate network for instance. [ 733.754349] env[62109]: DEBUG nova.network.neutron [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Successfully updated port: 76f15b7e-4103-4568-8042-248ee15513dc {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 733.870308] env[62109]: DEBUG nova.compute.utils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 733.875048] env[62109]: DEBUG nova.compute.manager [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 733.875284] env[62109]: DEBUG nova.network.neutron [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 733.930688] env[62109]: DEBUG nova.policy [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c06648376fab4d3fa7c17ae52f1707b7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd82badf8179445539adbfaf477a72ede', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 734.047883] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116226, 'name': Rename_Task, 'duration_secs': 0.135798} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.048290] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 734.048589] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2380bc0f-f904-4519-806d-0f7208989661 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.056473] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 734.056473] env[62109]: value = "task-1116228" [ 734.056473] env[62109]: _type = "Task" [ 734.056473] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.063254] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 734.080567] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116228, 'name': PowerOnVM_Task} progress is 33%. 
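
The nova.policy entry above shows a role-based check: "network:attach_external_network" fails for a token carrying only the 'reader' and 'member' roles. A toy illustration of that kind of check (the policy table here is invented for the example, not Nova's real policy file):

    def check_policy(rule, roles, policy):
        # Pass only if the caller holds at least one of the roles the rule requires.
        required = policy.get(rule, set())
        return bool(required & set(roles))

    POLICY = {'network:attach_external_network': {'admin'}}   # illustrative only
    assert check_policy('network:attach_external_network', ['reader', 'member'], POLICY) is False
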
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.085915] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116227, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.109148] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.259619] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.259995] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquired lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.260480] env[62109]: DEBUG nova.network.neutron [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 734.306603] env[62109]: DEBUG nova.network.neutron [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Successfully created port: c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 734.378716] env[62109]: DEBUG nova.compute.manager [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 734.585273] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116228, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.595428] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116227, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.601424] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.788545] env[62109]: DEBUG nova.compute.manager [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Received event network-vif-plugged-76f15b7e-4103-4568-8042-248ee15513dc {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 734.788842] env[62109]: DEBUG oslo_concurrency.lockutils [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] Acquiring lock "1399f618-3a93-4731-a59b-f98306d6cd52-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.788958] env[62109]: DEBUG oslo_concurrency.lockutils [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] Lock "1399f618-3a93-4731-a59b-f98306d6cd52-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.789144] env[62109]: DEBUG oslo_concurrency.lockutils [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] Lock "1399f618-3a93-4731-a59b-f98306d6cd52-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.789283] env[62109]: DEBUG nova.compute.manager [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] No waiting events found dispatching network-vif-plugged-76f15b7e-4103-4568-8042-248ee15513dc {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 734.789448] env[62109]: WARNING nova.compute.manager [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Received unexpected event network-vif-plugged-76f15b7e-4103-4568-8042-248ee15513dc for instance with vm_state building and task_state spawning. [ 734.789606] env[62109]: DEBUG nova.compute.manager [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Received event network-changed-76f15b7e-4103-4568-8042-248ee15513dc {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 734.789757] env[62109]: DEBUG nova.compute.manager [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Refreshing instance network info cache due to event network-changed-76f15b7e-4103-4568-8042-248ee15513dc. 
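
The external-event sequence above (network-vif-plugged arrives, the per-instance "-events" lock is taken, "No waiting events found dispatching ...", then a WARNING about an unexpected event while the instance is still building) is the classic race between the Neutron notification and the spawn path that has not yet registered a waiter. A toy model of that waiter table, using only the standard library:

    import threading
    from collections import defaultdict

    class InstanceEvents:
        # Waiters register per (instance, event); notifications pop them.
        def __init__(self):
            self._lock = threading.Lock()
            self._events = defaultdict(dict)   # instance_uuid -> {event_name: Event}

        def prepare(self, instance_uuid, event_name):
            # Called by the spawn path before it starts waiting for the VIF plug.
            ev = threading.Event()
            with self._lock:
                self._events[instance_uuid][event_name] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            # Called when the external notification arrives.
            with self._lock:
                return self._events.get(instance_uuid, {}).pop(event_name, None)

    def dispatch(events, instance_uuid, event_name):
        waiter = events.pop(instance_uuid, event_name)
        if waiter is None:
            # Corresponds to the WARNING above: the event raced ahead of the waiter.
            print("No waiting events found dispatching %s" % event_name)
            return
        waiter.set()
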
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 734.789919] env[62109]: DEBUG oslo_concurrency.lockutils [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] Acquiring lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.802273] env[62109]: DEBUG nova.network.neutron [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 734.833823] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb3b755-f544-4777-a36b-0188aa28a4b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.840427] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9858f92f-b8ec-4420-8368-d93bdf195487 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.874170] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ca5728-f42a-47b5-87cf-59552853c392 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.884881] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363e9487-0a43-4fd4-b111-59300f1e9aec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.904537] env[62109]: DEBUG nova.compute.provider_tree [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.962740] env[62109]: DEBUG nova.network.neutron [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Updating instance_info_cache with network_info: [{"id": "76f15b7e-4103-4568-8042-248ee15513dc", "address": "fa:16:3e:81:0c:19", "network": {"id": "66a020c3-cdbc-464e-83aa-02e9126e8492", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1240081161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bff923ccb02449aa834523a0652cbdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f15b7e-41", "ovs_interfaceid": 
"76f15b7e-4103-4568-8042-248ee15513dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.070441] env[62109]: DEBUG oslo_vmware.api [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116228, 'name': PowerOnVM_Task, 'duration_secs': 0.581944} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.070528] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 735.070713] env[62109]: INFO nova.compute.manager [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Took 7.92 seconds to spawn the instance on the hypervisor. [ 735.070921] env[62109]: DEBUG nova.compute.manager [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 735.071781] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7611c227-6abb-450a-aece-0a16bccd7a24 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.086980] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116227, 'name': CloneVM_Task, 'duration_secs': 1.319709} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.087486] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Created linked-clone VM from snapshot [ 735.088908] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fdf7406-6e64-4b15-93ea-26d03c142def {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.096020] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Uploading image b63b1f1b-1ccc-41fa-a8af-9e0f9cada2d3 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 735.125392] env[62109]: DEBUG oslo_vmware.rw_handles [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 735.125392] env[62109]: value = "vm-244367" [ 735.125392] env[62109]: _type = "VirtualMachine" [ 735.125392] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 735.125971] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-550bac30-08fc-40b6-9458-77e7f80e8b77 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.135202] env[62109]: DEBUG oslo_vmware.rw_handles [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lease: (returnval){ [ 735.135202] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e9cf6d-aca9-1d96-2aa2-73a44fac9d99" [ 735.135202] env[62109]: _type = "HttpNfcLease" [ 735.135202] env[62109]: } obtained for exporting VM: (result){ [ 735.135202] env[62109]: value = "vm-244367" [ 735.135202] env[62109]: _type = "VirtualMachine" [ 735.135202] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 735.135479] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the lease: (returnval){ [ 735.135479] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e9cf6d-aca9-1d96-2aa2-73a44fac9d99" [ 735.135479] env[62109]: _type = "HttpNfcLease" [ 735.135479] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 735.142654] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 735.142654] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e9cf6d-aca9-1d96-2aa2-73a44fac9d99" [ 735.142654] env[62109]: _type = "HttpNfcLease" [ 735.142654] env[62109]: } is initializing. 
{{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 735.397252] env[62109]: DEBUG nova.compute.manager [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 735.408269] env[62109]: DEBUG nova.scheduler.client.report [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 735.423964] env[62109]: DEBUG nova.virt.hardware [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 735.424234] env[62109]: DEBUG nova.virt.hardware [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 735.424396] env[62109]: DEBUG nova.virt.hardware [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 735.424576] env[62109]: DEBUG nova.virt.hardware [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 735.424722] env[62109]: DEBUG nova.virt.hardware [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
735.424869] env[62109]: DEBUG nova.virt.hardware [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 735.425083] env[62109]: DEBUG nova.virt.hardware [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 735.425244] env[62109]: DEBUG nova.virt.hardware [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 735.425474] env[62109]: DEBUG nova.virt.hardware [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 735.425651] env[62109]: DEBUG nova.virt.hardware [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 735.425819] env[62109]: DEBUG nova.virt.hardware [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 735.426742] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3962d71-9210-45ef-922d-5e5dc2ecf601 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.435842] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d410cec-1a8b-4893-9929-a91d6de4b919 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.466142] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Releasing lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.466523] env[62109]: DEBUG nova.compute.manager [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Instance network_info: |[{"id": "76f15b7e-4103-4568-8042-248ee15513dc", "address": "fa:16:3e:81:0c:19", "network": {"id": "66a020c3-cdbc-464e-83aa-02e9126e8492", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1240081161-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bff923ccb02449aa834523a0652cbdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f15b7e-41", "ovs_interfaceid": "76f15b7e-4103-4568-8042-248ee15513dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 735.467105] env[62109]: DEBUG oslo_concurrency.lockutils [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] Acquired lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.467294] env[62109]: DEBUG nova.network.neutron [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Refreshing network info cache for port 76f15b7e-4103-4568-8042-248ee15513dc {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 735.468429] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:0c:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea6e81c3-94aa-40a6-a4d4-7f338b503442', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76f15b7e-4103-4568-8042-248ee15513dc', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 735.475769] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Creating folder: Project (7bff923ccb02449aa834523a0652cbdb). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 735.478702] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c5085b9-3990-4baa-9168-220cd8a93493 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.490157] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Created folder: Project (7bff923ccb02449aa834523a0652cbdb) in parent group-v244329. [ 735.490348] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Creating folder: Instances. 
Parent ref: group-v244368. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 735.490568] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ae02450-beee-46a8-9eab-360021bce52a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.500601] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Created folder: Instances in parent group-v244368. [ 735.500832] env[62109]: DEBUG oslo.service.loopingcall [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 735.501034] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 735.501243] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01d81aee-ad4e-481b-84ec-dee6d787c9cd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.527892] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 735.527892] env[62109]: value = "task-1116232" [ 735.527892] env[62109]: _type = "Task" [ 735.527892] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.537685] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116232, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.598111] env[62109]: INFO nova.compute.manager [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Took 46.76 seconds to build instance. [ 735.644304] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 735.644304] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e9cf6d-aca9-1d96-2aa2-73a44fac9d99" [ 735.644304] env[62109]: _type = "HttpNfcLease" [ 735.644304] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 735.644614] env[62109]: DEBUG oslo_vmware.rw_handles [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 735.644614] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e9cf6d-aca9-1d96-2aa2-73a44fac9d99" [ 735.644614] env[62109]: _type = "HttpNfcLease" [ 735.644614] env[62109]: }. 
{{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 735.645435] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55487f20-dd11-4d9f-a832-0598993ecb9e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.653831] env[62109]: DEBUG oslo_vmware.rw_handles [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52013a07-1e77-61d9-e1bd-92be4a32b290/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 735.654018] env[62109]: DEBUG oslo_vmware.rw_handles [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52013a07-1e77-61d9-e1bd-92be4a32b290/disk-0.vmdk for reading. {{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 735.751721] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-823d177b-2eb3-49d3-8a63-257cf6ed44a7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.913326] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.550s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.914015] env[62109]: DEBUG nova.compute.manager [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 735.917141] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.661s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.918672] env[62109]: INFO nova.compute.claims [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 735.994691] env[62109]: DEBUG nova.network.neutron [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Updated VIF entry in instance network info cache for port 76f15b7e-4103-4568-8042-248ee15513dc. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 735.995059] env[62109]: DEBUG nova.network.neutron [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Updating instance_info_cache with network_info: [{"id": "76f15b7e-4103-4568-8042-248ee15513dc", "address": "fa:16:3e:81:0c:19", "network": {"id": "66a020c3-cdbc-464e-83aa-02e9126e8492", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1240081161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bff923ccb02449aa834523a0652cbdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f15b7e-41", "ovs_interfaceid": "76f15b7e-4103-4568-8042-248ee15513dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.037955] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116232, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.102035] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f66cec21-c734-4075-8978-a4be06c48a10 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "c753a2db-d701-4508-88bd-4ebe4f32a075" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 136.449s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.151230] env[62109]: DEBUG nova.network.neutron [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Successfully updated port: c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 736.418498] env[62109]: DEBUG nova.compute.utils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 736.421054] env[62109]: DEBUG nova.compute.manager [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 736.421228] env[62109]: DEBUG nova.network.neutron [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 736.497050] env[62109]: DEBUG nova.policy [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c06648376fab4d3fa7c17ae52f1707b7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd82badf8179445539adbfaf477a72ede', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 736.500139] env[62109]: DEBUG oslo_concurrency.lockutils [req-6cddaf09-a668-4246-b555-2f8ff5e73b89 req-2f92ff82-d7ec-4697-94e4-319f27210643 service nova] Releasing lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.541509] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116232, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.606250] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 736.654053] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "refresh_cache-55381bef-dab5-44cd-97fe-9fc75ab61d0e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.654266] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquired lock "refresh_cache-55381bef-dab5-44cd-97fe-9fc75ab61d0e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.654415] env[62109]: DEBUG nova.network.neutron [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 736.787640] env[62109]: DEBUG nova.network.neutron [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Successfully created port: 86903ba6-b62e-4930-a50b-26da3e8cfb63 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.921725] env[62109]: DEBUG nova.compute.manager [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 737.000699] env[62109]: DEBUG nova.compute.manager [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Received event network-vif-plugged-c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 737.000963] env[62109]: DEBUG oslo_concurrency.lockutils [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] Acquiring lock "55381bef-dab5-44cd-97fe-9fc75ab61d0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.004113] env[62109]: DEBUG oslo_concurrency.lockutils [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] Lock "55381bef-dab5-44cd-97fe-9fc75ab61d0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.008023] env[62109]: DEBUG oslo_concurrency.lockutils [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] Lock "55381bef-dab5-44cd-97fe-9fc75ab61d0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.008023] env[62109]: DEBUG nova.compute.manager [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] No waiting events found dispatching network-vif-plugged-c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 737.008023] env[62109]: WARNING nova.compute.manager [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Received unexpected event network-vif-plugged-c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a for instance with vm_state building and task_state spawning. [ 737.008023] env[62109]: DEBUG nova.compute.manager [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Received event network-changed-c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 737.008023] env[62109]: DEBUG nova.compute.manager [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Refreshing instance network info cache due to event network-changed-c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 737.008374] env[62109]: DEBUG oslo_concurrency.lockutils [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] Acquiring lock "refresh_cache-55381bef-dab5-44cd-97fe-9fc75ab61d0e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.042951] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116232, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.100971] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Acquiring lock "8d9a7696-0465-4895-9ce8-4b4b8b2ca59e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.101300] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Lock "8d9a7696-0465-4895-9ce8-4b4b8b2ca59e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.101521] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Acquiring lock "8d9a7696-0465-4895-9ce8-4b4b8b2ca59e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.102068] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Lock "8d9a7696-0465-4895-9ce8-4b4b8b2ca59e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.102068] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Lock "8d9a7696-0465-4895-9ce8-4b4b8b2ca59e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.105103] env[62109]: INFO nova.compute.manager [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Terminating instance [ 737.109732] env[62109]: DEBUG nova.compute.manager [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 737.109732] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 737.113032] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb60dae-1ae5-49de-a59a-5514f0fa931e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.127922] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 737.131758] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5237096a-fc5f-45b3-b79d-e2d6db66bf89 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.136343] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.139712] env[62109]: DEBUG oslo_vmware.api [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Waiting for the task: (returnval){ [ 737.139712] env[62109]: value = "task-1116233" [ 737.139712] env[62109]: _type = "Task" [ 737.139712] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.150225] env[62109]: DEBUG oslo_vmware.api [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116233, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.207360] env[62109]: DEBUG nova.network.neutron [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 737.252352] env[62109]: INFO nova.compute.manager [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Rescuing [ 737.252649] env[62109]: DEBUG oslo_concurrency.lockutils [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "refresh_cache-c753a2db-d701-4508-88bd-4ebe4f32a075" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.252845] env[62109]: DEBUG oslo_concurrency.lockutils [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquired lock "refresh_cache-c753a2db-d701-4508-88bd-4ebe4f32a075" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.253069] env[62109]: DEBUG nova.network.neutron [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 737.433586] env[62109]: DEBUG nova.network.neutron [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Updating instance_info_cache with network_info: [{"id": "c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a", "address": "fa:16:3e:d6:97:c9", "network": {"id": "4fcb7814-4de2-430d-af81-55131504c5bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2d2be1e2322b4c87945fff0cd79d3c7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc53b0b3c-0c", "ovs_interfaceid": "c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.542653] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf86003-d6b5-4c16-85a4-78b3d51e7267 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.552320] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116232, 'name': CreateVM_Task, 'duration_secs': 1.532884} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.553240] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 737.554065] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.554267] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.554642] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 737.557309] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55dff49e-82f0-437d-b829-4cc6d4bc6d80 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.560742] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce86ba0-fd0c-4d3d-8401-1fdee6ac2d7b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.567779] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 737.567779] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5253f961-3f60-e57b-edeb-f654c7ad57e1" [ 737.567779] env[62109]: _type = "Task" [ 737.567779] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.601444] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8899bdb6-8e45-4ed2-b83b-e7504fa81ae2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.612105] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5253f961-3f60-e57b-edeb-f654c7ad57e1, 'name': SearchDatastore_Task, 'duration_secs': 0.026035} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.617477] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.617761] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.618046] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.618205] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.618392] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.618731] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cecbdca8-7a50-46fa-b433-71260200a4f1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.621937] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac467a9-359b-41e4-b0ed-d0756c7a78c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.637556] env[62109]: DEBUG nova.compute.provider_tree [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.642026] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 737.642026] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 
tempest-AttachVolumeTestJSON-2109712222-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 737.642026] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db3c612f-1f2f-4546-a87a-fdebe46f0649 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.653794] env[62109]: DEBUG oslo_vmware.api [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116233, 'name': PowerOffVM_Task, 'duration_secs': 0.329678} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.654023] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 737.654023] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525d7a6e-e529-a7ff-5aec-ce2f51721b41" [ 737.654023] env[62109]: _type = "Task" [ 737.654023] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.654416] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 737.654718] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 737.655146] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ce86034-3441-4a4d-9bd1-90a526c0864a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.666596] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525d7a6e-e529-a7ff-5aec-ce2f51721b41, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.743667] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 737.743990] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 737.744230] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Deleting the datastore file [datastore2] 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 737.744614] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbe5449e-6e37-4fe5-b0ee-1f418c46122b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.755318] env[62109]: DEBUG oslo_vmware.api [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Waiting for the task: (returnval){ [ 737.755318] env[62109]: value = "task-1116235" [ 737.755318] env[62109]: _type = "Task" [ 737.755318] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.768977] env[62109]: DEBUG oslo_vmware.api [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116235, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.938158] env[62109]: DEBUG nova.compute.manager [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 737.940330] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Releasing lock "refresh_cache-55381bef-dab5-44cd-97fe-9fc75ab61d0e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.940330] env[62109]: DEBUG nova.compute.manager [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Instance network_info: |[{"id": "c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a", "address": "fa:16:3e:d6:97:c9", "network": {"id": "4fcb7814-4de2-430d-af81-55131504c5bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2d2be1e2322b4c87945fff0cd79d3c7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc53b0b3c-0c", "ovs_interfaceid": "c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 737.940786] env[62109]: DEBUG oslo_concurrency.lockutils [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] Acquired lock "refresh_cache-55381bef-dab5-44cd-97fe-9fc75ab61d0e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.940997] env[62109]: DEBUG nova.network.neutron [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Refreshing network info cache for port c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 737.942343] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:97:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c8dbe25-bca7-4d91-b577-193b8b2aad8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 737.954684] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Creating folder: Project 
(d82badf8179445539adbfaf477a72ede). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 737.958814] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6add25f-0976-4aa0-a742-b418e5c78aa6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.974553] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Created folder: Project (d82badf8179445539adbfaf477a72ede) in parent group-v244329. [ 737.974553] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Creating folder: Instances. Parent ref: group-v244371. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 737.976463] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fec7431c-3bcb-4d74-b3c7-1a8ae03ea25a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.985326] env[62109]: DEBUG nova.virt.hardware [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 737.985598] env[62109]: DEBUG nova.virt.hardware [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 737.985783] env[62109]: DEBUG nova.virt.hardware [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.985973] env[62109]: DEBUG nova.virt.hardware [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 737.986296] env[62109]: DEBUG nova.virt.hardware [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 737.986479] env[62109]: DEBUG nova.virt.hardware [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 737.986726] env[62109]: DEBUG nova.virt.hardware [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 737.986912] env[62109]: DEBUG nova.virt.hardware [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 737.987134] env[62109]: DEBUG nova.virt.hardware [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 737.987344] env[62109]: DEBUG nova.virt.hardware [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 737.987556] env[62109]: DEBUG nova.virt.hardware [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 737.988568] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbda4e9-aae0-4d6f-b8e6-a31688f9687b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.993176] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Created folder: Instances in parent group-v244371. [ 737.993510] env[62109]: DEBUG oslo.service.loopingcall [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 737.994203] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 737.994481] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-197feb4e-f7a2-4e1a-9171-bb54f5d85fab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.019853] env[62109]: DEBUG nova.network.neutron [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Updating instance_info_cache with network_info: [{"id": "23e271d5-4d3a-4ad9-934f-4123916de8c1", "address": "fa:16:3e:b0:2a:e9", "network": {"id": "41fbc2a0-d530-4b40-ba33-39d1ea6d6046", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1834365577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "45ad1c039aa9463e977cf986ce4dccf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e271d5-4d", "ovs_interfaceid": "23e271d5-4d3a-4ad9-934f-4123916de8c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.027510] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3026908e-11aa-497d-922d-e8bf20ec4214 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.034637] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 738.034637] env[62109]: value = "task-1116238" [ 738.034637] env[62109]: _type = "Task" [ 738.034637] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.061502] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116238, 'name': CreateVM_Task} progress is 15%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.143055] env[62109]: DEBUG nova.scheduler.client.report [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 738.168038] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525d7a6e-e529-a7ff-5aec-ce2f51721b41, 'name': SearchDatastore_Task, 'duration_secs': 0.022812} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.169496] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f184969-cafd-4fbe-af8a-2a7166a53c18 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.175827] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 738.175827] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522fa66e-7cbc-d84b-96f1-1ade2d6304c1" [ 738.175827] env[62109]: _type = "Task" [ 738.175827] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.187230] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522fa66e-7cbc-d84b-96f1-1ade2d6304c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.268663] env[62109]: DEBUG oslo_vmware.api [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Task: {'id': task-1116235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35962} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.268942] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 738.269214] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 738.269480] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 738.269668] env[62109]: INFO nova.compute.manager [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Took 1.16 seconds to destroy the instance on the hypervisor. [ 738.270130] env[62109]: DEBUG oslo.service.loopingcall [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 738.270747] env[62109]: DEBUG nova.compute.manager [-] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 738.270747] env[62109]: DEBUG nova.network.neutron [-] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 738.485158] env[62109]: DEBUG nova.network.neutron [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Successfully updated port: 86903ba6-b62e-4930-a50b-26da3e8cfb63 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 738.523588] env[62109]: DEBUG oslo_concurrency.lockutils [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Releasing lock "refresh_cache-c753a2db-d701-4508-88bd-4ebe4f32a075" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.549057] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116238, 'name': CreateVM_Task, 'duration_secs': 0.367586} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.549057] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 738.549256] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.549413] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.549763] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 738.550213] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-104bd9cf-e1bf-4905-adb4-a6209221ca61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.556935] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 738.556935] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52be2fe3-b9e5-080d-58b5-dadbe04a7a57" [ 738.556935] env[62109]: _type = "Task" [ 738.556935] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.570302] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52be2fe3-b9e5-080d-58b5-dadbe04a7a57, 'name': SearchDatastore_Task, 'duration_secs': 0.009962} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.574264] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.574552] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 738.574801] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.650435] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.732s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.650826] env[62109]: DEBUG nova.compute.manager [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 738.655494] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.655s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.660828] env[62109]: INFO nova.compute.claims [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 738.704461] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522fa66e-7cbc-d84b-96f1-1ade2d6304c1, 'name': SearchDatastore_Task, 'duration_secs': 0.012157} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.704461] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.704461] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 1399f618-3a93-4731-a59b-f98306d6cd52/1399f618-3a93-4731-a59b-f98306d6cd52.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 738.704926] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.705269] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 738.705611] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9682891-d90d-437a-89ad-f73f8b48224b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.709547] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7578265-3ff6-4cec-9f77-ae82f700c518 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.720041] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 738.720041] env[62109]: value = "task-1116239" [ 738.720041] env[62109]: _type = "Task" [ 738.720041] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.727145] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 738.727581] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 738.730137] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a65d5a9-673f-40bd-bd38-f1af56cdf25f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.737649] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116239, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.741906] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 738.741906] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522596bf-3140-3910-5e08-255d426f8c37" [ 738.741906] env[62109]: _type = "Task" [ 738.741906] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.758492] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522596bf-3140-3910-5e08-255d426f8c37, 'name': SearchDatastore_Task, 'duration_secs': 0.00858} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.759755] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9e52fda-52b2-4902-aade-2a9ed5eb573d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.766953] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 738.766953] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52939aa7-55a3-6c1d-7f34-5f59dc5a2bfe" [ 738.766953] env[62109]: _type = "Task" [ 738.766953] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.776266] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52939aa7-55a3-6c1d-7f34-5f59dc5a2bfe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.989939] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "refresh_cache-a9fb75d5-e303-4f31-888d-528963ab23b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.990120] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquired lock "refresh_cache-a9fb75d5-e303-4f31-888d-528963ab23b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.990293] env[62109]: DEBUG nova.network.neutron [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 738.995929] env[62109]: DEBUG nova.network.neutron [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Updated VIF entry in instance network info cache for port c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 738.996276] env[62109]: DEBUG nova.network.neutron [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Updating instance_info_cache with network_info: [{"id": "c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a", "address": "fa:16:3e:d6:97:c9", "network": {"id": "4fcb7814-4de2-430d-af81-55131504c5bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2d2be1e2322b4c87945fff0cd79d3c7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc53b0b3c-0c", "ovs_interfaceid": "c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.083532] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 739.085693] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task 
with opID=oslo.vmware-e106f6ff-575e-41eb-9514-0b69bb35fd2d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.096286] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 739.096286] env[62109]: value = "task-1116240" [ 739.096286] env[62109]: _type = "Task" [ 739.096286] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.113445] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116240, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.146214] env[62109]: DEBUG nova.compute.manager [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Received event network-vif-plugged-86903ba6-b62e-4930-a50b-26da3e8cfb63 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 739.146214] env[62109]: DEBUG oslo_concurrency.lockutils [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] Acquiring lock "a9fb75d5-e303-4f31-888d-528963ab23b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.146214] env[62109]: DEBUG oslo_concurrency.lockutils [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] Lock "a9fb75d5-e303-4f31-888d-528963ab23b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.146214] env[62109]: DEBUG oslo_concurrency.lockutils [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] Lock "a9fb75d5-e303-4f31-888d-528963ab23b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.146214] env[62109]: DEBUG nova.compute.manager [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] No waiting events found dispatching network-vif-plugged-86903ba6-b62e-4930-a50b-26da3e8cfb63 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 739.146650] env[62109]: WARNING nova.compute.manager [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Received unexpected event network-vif-plugged-86903ba6-b62e-4930-a50b-26da3e8cfb63 for instance with vm_state building and task_state spawning. 
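The oslo_concurrency.lockutils DEBUG trio above (Acquiring lock ... by ..., Lock ... acquired ... :: waited Ns, Lock ... "released" ... :: held Ns) is emitted by the library's synchronized-lock wrapper around a callable. A minimal, self-contained sketch of that pattern follows; the lock name and the guarded function are hypothetical placeholders, not Nova's actual code:

import logging

from oslo_concurrency import lockutils

# DEBUG logging must be enabled to see the acquire/release messages.
logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('example-events')   # placeholder lock name
def _pop_event_example():
    # Hypothetical critical section; the wrapper logs "Acquiring lock",
    # 'acquired ... :: waited Ns' and '"released" ... :: held Ns' around it.
    return None


_pop_event_example()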
[ 739.146760] env[62109]: DEBUG nova.compute.manager [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Received event network-changed-86903ba6-b62e-4930-a50b-26da3e8cfb63 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 739.146967] env[62109]: DEBUG nova.compute.manager [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Refreshing instance network info cache due to event network-changed-86903ba6-b62e-4930-a50b-26da3e8cfb63. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 739.147294] env[62109]: DEBUG oslo_concurrency.lockutils [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] Acquiring lock "refresh_cache-a9fb75d5-e303-4f31-888d-528963ab23b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.170675] env[62109]: DEBUG nova.compute.utils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 739.172088] env[62109]: DEBUG nova.compute.manager [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 739.172286] env[62109]: DEBUG nova.network.neutron [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 739.192112] env[62109]: DEBUG nova.network.neutron [-] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.233135] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116239, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494564} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.233650] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 1399f618-3a93-4731-a59b-f98306d6cd52/1399f618-3a93-4731-a59b-f98306d6cd52.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 739.234034] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 739.234281] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3035c59c-8a94-48ed-933e-347b68f7d7d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.244552] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 739.244552] env[62109]: value = "task-1116241" [ 739.244552] env[62109]: _type = "Task" [ 739.244552] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.262041] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116241, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.263577] env[62109]: DEBUG nova.policy [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42dfa9389c91465cbd87a83310117faa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb674b9896af406aad0bc08bb8a63c72', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 739.282284] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52939aa7-55a3-6c1d-7f34-5f59dc5a2bfe, 'name': SearchDatastore_Task, 'duration_secs': 0.008827} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.282738] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.283110] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 55381bef-dab5-44cd-97fe-9fc75ab61d0e/55381bef-dab5-44cd-97fe-9fc75ab61d0e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 739.283443] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-070b9a67-34ee-4dc1-aeba-c0eddf9e45d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.291278] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 739.291278] env[62109]: value = "task-1116242" [ 739.291278] env[62109]: _type = "Task" [ 739.291278] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.304229] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116242, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.500698] env[62109]: DEBUG oslo_concurrency.lockutils [req-80b9a2bd-3f6e-42bb-8cf8-c9eaebb8b259 req-1e32ba5c-8169-4793-87c4-8de1e749bc5e service nova] Releasing lock "refresh_cache-55381bef-dab5-44cd-97fe-9fc75ab61d0e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.541085] env[62109]: DEBUG nova.network.neutron [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 739.606475] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116240, 'name': PowerOffVM_Task, 'duration_secs': 0.214647} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.609546] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 739.610675] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee4f5bf-dffb-4e8a-b928-e4dafe7c200a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.632374] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14bf647-57c4-4e8b-9523-46117e2201de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.678167] env[62109]: DEBUG nova.compute.manager [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 739.692788] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 739.693201] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34aa4c4f-3f34-416a-bbb6-658ad5e070e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.695301] env[62109]: INFO nova.compute.manager [-] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Took 1.42 seconds to deallocate network for instance. [ 739.701869] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 739.701869] env[62109]: value = "task-1116243" [ 739.701869] env[62109]: _type = "Task" [ 739.701869] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.721135] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 739.722201] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 739.722201] env[62109]: DEBUG oslo_concurrency.lockutils [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.722201] env[62109]: DEBUG oslo_concurrency.lockutils [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.722201] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 739.725024] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ad51e79-6cf3-4738-97a4-244e8f58310a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.732888] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 739.732888] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 739.732888] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2765ec56-f392-4914-8087-474e9fa019c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.746598] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 739.746598] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52633139-f702-502d-f259-ddc54602f8b0" [ 739.746598] env[62109]: _type = "Task" [ 739.746598] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.767059] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52633139-f702-502d-f259-ddc54602f8b0, 'name': SearchDatastore_Task, 'duration_secs': 0.008288} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.767438] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116241, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083219} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.768472] env[62109]: DEBUG nova.network.neutron [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Updating instance_info_cache with network_info: [{"id": "86903ba6-b62e-4930-a50b-26da3e8cfb63", "address": "fa:16:3e:47:53:ab", "network": {"id": "4fcb7814-4de2-430d-af81-55131504c5bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2d2be1e2322b4c87945fff0cd79d3c7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86903ba6-b6", "ovs_interfaceid": "86903ba6-b62e-4930-a50b-26da3e8cfb63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.773602] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Extended 
root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 739.773994] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a205d7a-655a-4c06-8b1c-059ff564ef43 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.777711] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45fa928-b3fd-45c5-a73f-be8decf47c57 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.802347] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 1399f618-3a93-4731-a59b-f98306d6cd52/1399f618-3a93-4731-a59b-f98306d6cd52.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 739.811486] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a730572-3a8d-4ea2-af29-9c8799bb513e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.826529] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 739.826529] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524a371f-567d-b7cf-088e-5ed63eabf1f1" [ 739.826529] env[62109]: _type = "Task" [ 739.826529] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.835830] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116242, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.447114} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.836524] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 739.836524] env[62109]: value = "task-1116244" [ 739.836524] env[62109]: _type = "Task" [ 739.836524] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.837096] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 55381bef-dab5-44cd-97fe-9fc75ab61d0e/55381bef-dab5-44cd-97fe-9fc75ab61d0e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 739.837347] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 739.837655] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5f4122f5-a472-4b4d-9461-9bda4d1c3f62 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.848362] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524a371f-567d-b7cf-088e-5ed63eabf1f1, 'name': SearchDatastore_Task, 'duration_secs': 0.009765} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.849513] env[62109]: DEBUG oslo_concurrency.lockutils [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.849753] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] c753a2db-d701-4508-88bd-4ebe4f32a075/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk. {{(pid=62109) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 739.850067] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14539e9c-0456-48fc-b5b4-188de685ef4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.856745] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116244, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.857661] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 739.857661] env[62109]: value = "task-1116245" [ 739.857661] env[62109]: _type = "Task" [ 739.857661] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.863696] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 739.863696] env[62109]: value = "task-1116246" [ 739.863696] env[62109]: _type = "Task" [ 739.863696] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.871134] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116245, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.876595] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116246, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.888635] env[62109]: DEBUG nova.network.neutron [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Successfully created port: bb65c0b6-debe-49a6-a623-fc3778c5b9a8 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 740.206235] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.276740] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Releasing lock "refresh_cache-a9fb75d5-e303-4f31-888d-528963ab23b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.277197] env[62109]: DEBUG nova.compute.manager [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Instance network_info: |[{"id": "86903ba6-b62e-4930-a50b-26da3e8cfb63", "address": "fa:16:3e:47:53:ab", "network": {"id": "4fcb7814-4de2-430d-af81-55131504c5bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2d2be1e2322b4c87945fff0cd79d3c7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86903ba6-b6", "ovs_interfaceid": "86903ba6-b62e-4930-a50b-26da3e8cfb63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 740.278075] env[62109]: DEBUG oslo_concurrency.lockutils [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] Acquired lock "refresh_cache-a9fb75d5-e303-4f31-888d-528963ab23b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.278342] env[62109]: DEBUG nova.network.neutron [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Refreshing network info cache for port 86903ba6-b62e-4930-a50b-26da3e8cfb63 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 740.281462] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:53:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c8dbe25-bca7-4d91-b577-193b8b2aad8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '86903ba6-b62e-4930-a50b-26da3e8cfb63', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.293244] env[62109]: DEBUG oslo.service.loopingcall [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.295816] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 740.296895] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397f41f4-2df3-4030-ba4b-22c59f84217f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.300562] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66bf8761-e69f-43ea-abcf-feb3f65acce1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.327111] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0a792d-26dd-4ca3-bf50-82c3ff33117a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.330599] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.330599] env[62109]: value = "task-1116247" [ 740.330599] env[62109]: _type = "Task" [ 740.330599] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.368824] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da291fd-92d3-41b3-b068-70380515b7fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.374195] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116247, 'name': CreateVM_Task} progress is 15%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.382684] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116244, 'name': ReconfigVM_Task, 'duration_secs': 0.325025} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.390146] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 1399f618-3a93-4731-a59b-f98306d6cd52/1399f618-3a93-4731-a59b-f98306d6cd52.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 740.390937] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116246, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.445936} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.391184] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116245, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082022} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.391393] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b58ee5b-4aa7-4a35-8ba7-e8f6910466d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.394174] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67f9d2b-4d33-41f4-9498-39282514aec2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.398983] env[62109]: INFO nova.virt.vmwareapi.ds_util [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] c753a2db-d701-4508-88bd-4ebe4f32a075/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk. [ 740.399359] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 740.400137] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f485d53-81a7-4c1f-b8c0-75cebde62180 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.405574] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf56c4c9-611e-4658-9a1f-aa2b88fa4bfd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.453269] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 55381bef-dab5-44cd-97fe-9fc75ab61d0e/55381bef-dab5-44cd-97fe-9fc75ab61d0e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 740.460818] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] c753a2db-d701-4508-88bd-4ebe4f32a075/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 740.461529] env[62109]: DEBUG nova.compute.provider_tree [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.463012] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 
tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 740.463012] env[62109]: value = "task-1116248" [ 740.463012] env[62109]: _type = "Task" [ 740.463012] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.463250] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2b20a5c-f981-4131-bc67-85d6cb0ef799 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.477643] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7caca47-f371-45b0-9df2-19ab6224774e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.492900] env[62109]: DEBUG nova.scheduler.client.report [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 740.506875] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116248, 'name': Rename_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.509435] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 740.509435] env[62109]: value = "task-1116249" [ 740.509435] env[62109]: _type = "Task" [ 740.509435] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.510651] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 740.510651] env[62109]: value = "task-1116250" [ 740.510651] env[62109]: _type = "Task" [ 740.510651] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.522239] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116250, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.525722] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116249, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.693783] env[62109]: DEBUG nova.compute.manager [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 740.718240] env[62109]: DEBUG nova.virt.hardware [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 740.718631] env[62109]: DEBUG nova.virt.hardware [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 740.718808] env[62109]: DEBUG nova.virt.hardware [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.719062] env[62109]: DEBUG nova.virt.hardware [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 740.719279] env[62109]: DEBUG nova.virt.hardware [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.719458] env[62109]: DEBUG nova.virt.hardware [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 740.719703] env[62109]: DEBUG nova.virt.hardware [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 740.719906] env[62109]: DEBUG nova.virt.hardware [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 740.720131] env[62109]: DEBUG nova.virt.hardware [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 740.720351] env[62109]: DEBUG nova.virt.hardware [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 740.720572] env[62109]: DEBUG nova.virt.hardware [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 740.721532] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49eb016c-7a47-4bfc-adf2-530230391ab5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.729656] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072d06b7-23b6-48ed-9aeb-61db062231e7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.841622] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116247, 'name': CreateVM_Task, 'duration_secs': 0.399126} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.841622] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 740.841872] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.841872] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.842268] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 740.842585] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-316134c5-ca3e-4846-912a-7e6066c79a26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.847835] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 740.847835] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5237fc10-4ac6-20b2-95b6-8f7815b99a24" [ 740.847835] env[62109]: _type = "Task" [ 740.847835] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.855710] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5237fc10-4ac6-20b2-95b6-8f7815b99a24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.001514] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.346s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.001770] env[62109]: DEBUG nova.compute.manager [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 741.004254] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116248, 'name': Rename_Task, 'duration_secs': 0.140568} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.004424] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.261s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.005873] env[62109]: INFO nova.compute.claims [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 741.008280] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 741.008737] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b389b60e-7d1d-4d09-995c-5526ed62ed08 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.017689] env[62109]: DEBUG nova.network.neutron [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Updated VIF entry in instance network info cache for port 86903ba6-b62e-4930-a50b-26da3e8cfb63. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 741.017689] env[62109]: DEBUG nova.network.neutron [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Updating instance_info_cache with network_info: [{"id": "86903ba6-b62e-4930-a50b-26da3e8cfb63", "address": "fa:16:3e:47:53:ab", "network": {"id": "4fcb7814-4de2-430d-af81-55131504c5bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2d2be1e2322b4c87945fff0cd79d3c7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86903ba6-b6", "ovs_interfaceid": "86903ba6-b62e-4930-a50b-26da3e8cfb63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.020327] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 741.020327] env[62109]: value = "task-1116251" [ 741.020327] env[62109]: _type = "Task" [ 741.020327] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.028739] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116249, 'name': ReconfigVM_Task, 'duration_secs': 0.316289} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.028973] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116250, 'name': ReconfigVM_Task, 'duration_secs': 0.314555} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.029587] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Reconfigured VM instance instance-00000025 to attach disk [datastore1] c753a2db-d701-4508-88bd-4ebe4f32a075/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 741.029957] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 55381bef-dab5-44cd-97fe-9fc75ab61d0e/55381bef-dab5-44cd-97fe-9fc75ab61d0e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 741.031100] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239247f0-d92a-4358-a258-fa2c99d74152 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.033485] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bed945dd-cb08-47bc-975f-524e3fab99c4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.040134] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116251, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.045669] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 741.045669] env[62109]: value = "task-1116252" [ 741.045669] env[62109]: _type = "Task" [ 741.045669] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.070645] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d764789-a4fe-4f3b-9b4b-2fb45afa0b37 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.089018] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116252, 'name': Rename_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.089415] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 741.089415] env[62109]: value = "task-1116253" [ 741.089415] env[62109]: _type = "Task" [ 741.089415] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.099320] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116253, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.358590] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5237fc10-4ac6-20b2-95b6-8f7815b99a24, 'name': SearchDatastore_Task, 'duration_secs': 0.01067} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.358924] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.359169] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 741.359421] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.359585] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.359779] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 741.360061] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52e1d6fc-04c7-4c28-8eee-e2c32321b4e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.368422] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 741.368628] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 741.369415] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-446e8a5a-7894-43a7-9fc8-2c470962dc68 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.374972] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 741.374972] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529c920f-4a69-4097-b14d-8fa477ef0ec5" [ 741.374972] env[62109]: _type = "Task" [ 741.374972] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.383241] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529c920f-4a69-4097-b14d-8fa477ef0ec5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.512178] env[62109]: DEBUG nova.compute.utils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 741.512914] env[62109]: DEBUG nova.compute.manager [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 741.513105] env[62109]: DEBUG nova.network.neutron [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 741.521047] env[62109]: DEBUG oslo_concurrency.lockutils [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] Releasing lock "refresh_cache-a9fb75d5-e303-4f31-888d-528963ab23b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.521047] env[62109]: DEBUG nova.compute.manager [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Received event network-vif-deleted-4fbac6b8-6bfd-4785-aed0-7407c636e189 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 741.521047] env[62109]: INFO nova.compute.manager [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Neutron deleted interface 4fbac6b8-6bfd-4785-aed0-7407c636e189; detaching it from the instance and deleting it from the info cache [ 741.521277] env[62109]: DEBUG nova.network.neutron [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.532678] env[62109]: DEBUG oslo_vmware.api [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116251, 'name': PowerOnVM_Task, 'duration_secs': 0.501215} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.533492] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 741.533687] env[62109]: INFO nova.compute.manager [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Took 8.83 seconds to spawn the instance on the hypervisor. 
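
The polling exchanges above ("Waiting for the task: (returnval){ ... } to complete", "progress is 14%", "completed successfully") all follow one pattern: a vCenter task such as Rename_Task, ReconfigVM_Task or PowerOnVM_Task is submitted, then its state is polled until it finishes. The sketch below shows that loop in isolation; it is a simplified stand-in, not oslo_vmware's wait_for_task implementation, and the fetch_task_info callable, the state names and the poll interval are assumptions made for the example.

    import time

    # Illustrative task states, mirroring the queued/running/success
    # progression visible in the log; not oslo.vmware's real constants.
    PENDING_STATES = {"queued", "running"}

    class TaskPollError(Exception):
        """Raised when the remote task reports an error state."""

    def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a long-running backend task until it finishes.

        fetch_task_info: callable returning a dict such as
            {"state": "running", "progress": 14}   # assumed shape
        """
        deadline = time.monotonic() + timeout
        while True:
            info = fetch_task_info()
            state = info.get("state")
            if state == "success":
                return info
            if state not in PENDING_STATES:
                raise TaskPollError(f"task ended in state {state!r}: {info}")
            if time.monotonic() > deadline:
                raise TimeoutError("task did not complete in time")
            # The periodic "progress is N%" lines in the log correspond to
            # the intermediate polls taken here before the task completes.
            time.sleep(poll_interval)

The 'duration_secs' values reported on completion (for example 0.140568 for the Rename_Task above) are simply the elapsed time measured across such a polling loop.
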
[ 741.533871] env[62109]: DEBUG nova.compute.manager [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 741.534835] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b732b48d-d948-4d53-b3b0-295722b8fdc1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.567300] env[62109]: DEBUG nova.policy [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9974bf0499d54eff8e2c1604f5a263f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eff7610abe6345049cbd1e2512f72e81', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 741.573522] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116252, 'name': Rename_Task, 'duration_secs': 0.152609} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.573798] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 741.574100] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e267e595-ef5d-4668-a925-85b06953dd97 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.581274] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 741.581274] env[62109]: value = "task-1116254" [ 741.581274] env[62109]: _type = "Task" [ 741.581274] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.590855] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116254, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.599744] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116253, 'name': ReconfigVM_Task, 'duration_secs': 0.194393} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.600222] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 741.600502] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-758a7a3e-12cf-44de-a40e-1107fe4335e3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.606017] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 741.606017] env[62109]: value = "task-1116255" [ 741.606017] env[62109]: _type = "Task" [ 741.606017] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.614431] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116255, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.871866] env[62109]: DEBUG nova.network.neutron [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Successfully created port: 5a5722bc-d005-4ebd-8e52-08cbad2eb313 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 741.888442] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529c920f-4a69-4097-b14d-8fa477ef0ec5, 'name': SearchDatastore_Task, 'duration_secs': 0.008796} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.889597] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d18a3a2-e758-4875-b0e7-0f9f3626b54a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.896903] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 741.896903] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a118ff-1a7f-cd9b-7725-42c1da896fc6" [ 741.896903] env[62109]: _type = "Task" [ 741.896903] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.907816] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a118ff-1a7f-cd9b-7725-42c1da896fc6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.016874] env[62109]: DEBUG nova.compute.manager [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 742.034278] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f69cb58d-f9cf-44cc-8554-295a653472fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.048659] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd37f097-b02b-4781-b07a-03e08e180da3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.090266] env[62109]: INFO nova.compute.manager [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Took 32.35 seconds to build instance. [ 742.095186] env[62109]: DEBUG oslo_vmware.api [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116254, 'name': PowerOnVM_Task, 'duration_secs': 0.486949} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.103290] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 742.103559] env[62109]: INFO nova.compute.manager [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Took 6.71 seconds to spawn the instance on the hypervisor. [ 742.103754] env[62109]: DEBUG nova.compute.manager [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 742.104680] env[62109]: DEBUG nova.compute.manager [req-d7303091-75c6-45c5-a0fc-25cc655bd353 req-296cb864-e041-4b79-ae62-39d58dac5828 service nova] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Detach interface failed, port_id=4fbac6b8-6bfd-4785-aed0-7407c636e189, reason: Instance 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 742.109662] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf95fa7-dbde-4b33-aa6e-0d08730be8e9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.126071] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116255, 'name': PowerOnVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.136959] env[62109]: DEBUG nova.network.neutron [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Successfully updated port: bb65c0b6-debe-49a6-a623-fc3778c5b9a8 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 742.140941] env[62109]: DEBUG nova.compute.manager [req-fdf093a8-3193-4c74-b97c-cb483d304981 req-ed4e827b-ea7a-4c37-afa0-4b67d3ee8abf service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Received event network-vif-plugged-bb65c0b6-debe-49a6-a623-fc3778c5b9a8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 742.141018] env[62109]: DEBUG oslo_concurrency.lockutils [req-fdf093a8-3193-4c74-b97c-cb483d304981 req-ed4e827b-ea7a-4c37-afa0-4b67d3ee8abf service nova] Acquiring lock "028300fd-f9f8-4606-a39e-53582f830eeb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.141565] env[62109]: DEBUG oslo_concurrency.lockutils [req-fdf093a8-3193-4c74-b97c-cb483d304981 req-ed4e827b-ea7a-4c37-afa0-4b67d3ee8abf service nova] Lock "028300fd-f9f8-4606-a39e-53582f830eeb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.141565] env[62109]: DEBUG oslo_concurrency.lockutils [req-fdf093a8-3193-4c74-b97c-cb483d304981 req-ed4e827b-ea7a-4c37-afa0-4b67d3ee8abf service nova] Lock "028300fd-f9f8-4606-a39e-53582f830eeb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.141565] env[62109]: DEBUG nova.compute.manager [req-fdf093a8-3193-4c74-b97c-cb483d304981 req-ed4e827b-ea7a-4c37-afa0-4b67d3ee8abf service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] No waiting events found dispatching network-vif-plugged-bb65c0b6-debe-49a6-a623-fc3778c5b9a8 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 742.141857] env[62109]: WARNING nova.compute.manager [req-fdf093a8-3193-4c74-b97c-cb483d304981 req-ed4e827b-ea7a-4c37-afa0-4b67d3ee8abf service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Received unexpected event network-vif-plugged-bb65c0b6-debe-49a6-a623-fc3778c5b9a8 for instance with vm_state building and task_state spawning. 
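
The network-vif-plugged records above show the external-event path: a per-instance "-events" lock is acquired, any waiter registered for that port event is popped and signalled, and an event with no registered waiter is logged as unexpected (here because the instance is still in vm_state building). The sketch below mimics that dispatch shape with a toy dispatcher; the class name, method names and threading primitives are illustrative assumptions, not Nova's actual InstanceEvents code.

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Toy dispatcher for externally delivered instance events."""

        def __init__(self):
            self._lock = threading.Lock()
            # instance_id -> {event_name: threading.Event}
            self._waiters = defaultdict(dict)

        def prepare(self, instance_id, event_name):
            """Register interest in an event before starting the action."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[instance_id][event_name] = waiter
            return waiter

        def pop_event(self, instance_id, event_name):
            """Remove and return the waiter for an incoming event, if any."""
            # The per-instance "<uuid>-events" lock in the log plays the
            # role of this mutex around the waiter table.
            with self._lock:
                return self._waiters.get(instance_id, {}).pop(event_name, None)

    def handle_external_event(events, instance_id, event_name):
        waiter = events.pop_event(instance_id, event_name)
        if waiter is None:
            # Corresponds to the "Received unexpected event ..." warning:
            # nothing was waiting for this event, so it is only noted.
            print(f"unexpected event {event_name} for {instance_id}")
        else:
            waiter.set()
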
[ 742.407938] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a118ff-1a7f-cd9b-7725-42c1da896fc6, 'name': SearchDatastore_Task, 'duration_secs': 0.01037} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.408238] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.408488] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] a9fb75d5-e303-4f31-888d-528963ab23b7/a9fb75d5-e303-4f31-888d-528963ab23b7.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 742.408933] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f8cdfb6-ee73-42ee-93e9-8f6c1d6c6017 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.417494] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 742.417494] env[62109]: value = "task-1116256" [ 742.417494] env[62109]: _type = "Task" [ 742.417494] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.425672] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116256, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.544692] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43098de8-7814-4e6d-b2fb-b887ead2e1eb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.552560] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b2ed99-5b0b-4db7-9e16-930d66c581fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.585959] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba7206b-8e34-4d1f-80fe-fb9618bb3a21 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.594396] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ebc0ae-941d-43fa-a117-0d05822e660b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.598999] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3f8ebd7d-3c14-4a1a-98a2-98e85bd81a12 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.898s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.611521] env[62109]: DEBUG nova.compute.provider_tree [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.622388] env[62109]: DEBUG oslo_vmware.api [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116255, 'name': PowerOnVM_Task, 'duration_secs': 0.637003} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.622758] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 742.625751] env[62109]: DEBUG nova.compute.manager [None req-25b28e7b-dcb5-4a69-b693-5784ef12ed9f tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 742.626582] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c206d310-52f8-466b-aac1-4e26ed12313f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.638355] env[62109]: INFO nova.compute.manager [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Took 28.14 seconds to build instance. [ 742.641949] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "refresh_cache-028300fd-f9f8-4606-a39e-53582f830eeb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.641949] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquired lock "refresh_cache-028300fd-f9f8-4606-a39e-53582f830eeb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.641949] env[62109]: DEBUG nova.network.neutron [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 742.927804] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116256, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.039567] env[62109]: DEBUG nova.compute.manager [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 743.104211] env[62109]: DEBUG nova.virt.hardware [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 743.104487] env[62109]: DEBUG nova.virt.hardware [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 743.104652] env[62109]: DEBUG nova.virt.hardware [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 743.104839] env[62109]: DEBUG nova.virt.hardware [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 743.105181] env[62109]: DEBUG nova.virt.hardware [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 743.105364] env[62109]: DEBUG nova.virt.hardware [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 743.105624] env[62109]: DEBUG nova.virt.hardware [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 743.105799] env[62109]: DEBUG nova.virt.hardware [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 743.105973] env[62109]: DEBUG nova.virt.hardware [None 
req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 743.106161] env[62109]: DEBUG nova.virt.hardware [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 743.106342] env[62109]: DEBUG nova.virt.hardware [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 743.106762] env[62109]: DEBUG nova.compute.manager [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 743.109859] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9eb8ce4-790e-4290-addc-bbb57f1931eb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.114047] env[62109]: DEBUG nova.scheduler.client.report [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 743.124168] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9f7048-6e7c-4045-a4fe-453656f08cb9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.153921] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bdd5ce8b-c1ce-4af9-8bb2-0ad6d6fd641f tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "55381bef-dab5-44cd-97fe-9fc75ab61d0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.894s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.259593] env[62109]: DEBUG nova.network.neutron [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 743.432131] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116256, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.665958} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.432517] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] a9fb75d5-e303-4f31-888d-528963ab23b7/a9fb75d5-e303-4f31-888d-528963ab23b7.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 743.432747] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 743.433120] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef491bf2-4a0c-4ff9-b379-41c4ca66c30c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.439787] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 743.439787] env[62109]: value = "task-1116257" [ 743.439787] env[62109]: _type = "Task" [ 743.439787] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.450515] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116257, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.464156] env[62109]: DEBUG nova.network.neutron [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Successfully updated port: 5a5722bc-d005-4ebd-8e52-08cbad2eb313 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 743.497751] env[62109]: DEBUG nova.compute.manager [req-49b5f5b4-b936-4fbe-b26d-b069dea20bca req-dba8f7ec-8844-4367-8a9d-92246fceb038 service nova] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Received event network-vif-plugged-5a5722bc-d005-4ebd-8e52-08cbad2eb313 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 743.498245] env[62109]: DEBUG oslo_concurrency.lockutils [req-49b5f5b4-b936-4fbe-b26d-b069dea20bca req-dba8f7ec-8844-4367-8a9d-92246fceb038 service nova] Acquiring lock "1aaa9eae-9183-49d7-a452-4345ad2a9aa0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.498498] env[62109]: DEBUG oslo_concurrency.lockutils [req-49b5f5b4-b936-4fbe-b26d-b069dea20bca req-dba8f7ec-8844-4367-8a9d-92246fceb038 service nova] Lock "1aaa9eae-9183-49d7-a452-4345ad2a9aa0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.498684] env[62109]: DEBUG oslo_concurrency.lockutils [req-49b5f5b4-b936-4fbe-b26d-b069dea20bca req-dba8f7ec-8844-4367-8a9d-92246fceb038 service nova] Lock "1aaa9eae-9183-49d7-a452-4345ad2a9aa0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.498883] env[62109]: DEBUG nova.compute.manager [req-49b5f5b4-b936-4fbe-b26d-b069dea20bca req-dba8f7ec-8844-4367-8a9d-92246fceb038 service nova] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] No waiting events found dispatching network-vif-plugged-5a5722bc-d005-4ebd-8e52-08cbad2eb313 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 743.500370] env[62109]: WARNING nova.compute.manager [req-49b5f5b4-b936-4fbe-b26d-b069dea20bca req-dba8f7ec-8844-4367-8a9d-92246fceb038 service nova] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Received unexpected event network-vif-plugged-5a5722bc-d005-4ebd-8e52-08cbad2eb313 for instance with vm_state building and task_state spawning. [ 743.624272] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.620s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.624778] env[62109]: DEBUG nova.compute.manager [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 743.627684] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.757s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.629374] env[62109]: INFO nova.compute.claims [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.643171] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.659943] env[62109]: DEBUG nova.network.neutron [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Updating instance_info_cache with network_info: [{"id": "bb65c0b6-debe-49a6-a623-fc3778c5b9a8", "address": "fa:16:3e:d7:01:2c", "network": {"id": "35f9f3e9-a618-43e3-ac76-30778c802026", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-152247776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb674b9896af406aad0bc08bb8a63c72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91b0f7e5-0d1a-46e2-bf73-09656211dea2", "external-id": "nsx-vlan-transportzone-488", "segmentation_id": 488, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb65c0b6-de", "ovs_interfaceid": "bb65c0b6-debe-49a6-a623-fc3778c5b9a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.661856] env[62109]: DEBUG nova.compute.manager [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 743.951747] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116257, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080942} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.952113] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 743.952904] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f6f9aa-a9b9-42bc-b166-52cb9adbde2f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.971364] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Acquiring lock "refresh_cache-1aaa9eae-9183-49d7-a452-4345ad2a9aa0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.971568] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Acquired lock "refresh_cache-1aaa9eae-9183-49d7-a452-4345ad2a9aa0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.972611] env[62109]: DEBUG nova.network.neutron [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 743.984349] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] a9fb75d5-e303-4f31-888d-528963ab23b7/a9fb75d5-e303-4f31-888d-528963ab23b7.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 743.986201] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e41d3524-759b-4772-a057-633d54a70c2a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.009019] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 744.009019] env[62109]: value = "task-1116258" [ 744.009019] env[62109]: _type = "Task" [ 744.009019] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.018870] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116258, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.145703] env[62109]: DEBUG nova.compute.utils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 744.145703] env[62109]: DEBUG nova.compute.manager [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 744.145703] env[62109]: DEBUG nova.network.neutron [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 744.165876] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Releasing lock "refresh_cache-028300fd-f9f8-4606-a39e-53582f830eeb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.166149] env[62109]: DEBUG nova.compute.manager [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Instance network_info: |[{"id": "bb65c0b6-debe-49a6-a623-fc3778c5b9a8", "address": "fa:16:3e:d7:01:2c", "network": {"id": "35f9f3e9-a618-43e3-ac76-30778c802026", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-152247776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb674b9896af406aad0bc08bb8a63c72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91b0f7e5-0d1a-46e2-bf73-09656211dea2", "external-id": "nsx-vlan-transportzone-488", "segmentation_id": 488, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb65c0b6-de", "ovs_interfaceid": "bb65c0b6-debe-49a6-a623-fc3778c5b9a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 744.170979] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:01:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91b0f7e5-0d1a-46e2-bf73-09656211dea2', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb65c0b6-debe-49a6-a623-fc3778c5b9a8', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 744.178408] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Creating folder: Project (fb674b9896af406aad0bc08bb8a63c72). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 744.181161] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-daf401e5-ed6d-41cc-bc30-504a132b8d24 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.195363] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Created folder: Project (fb674b9896af406aad0bc08bb8a63c72) in parent group-v244329. [ 744.196037] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Creating folder: Instances. Parent ref: group-v244375. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 744.196833] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.197174] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-182b8a35-7826-4fd6-802a-e7ee29ff4601 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.209611] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Created folder: Instances in parent group-v244375. [ 744.209884] env[62109]: DEBUG oslo.service.loopingcall [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 744.210137] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 744.210376] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d4ac879-6fd5-4c1b-8ec9-3b050c9a674a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.233685] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 744.233685] env[62109]: value = "task-1116261" [ 744.233685] env[62109]: _type = "Task" [ 744.233685] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.244106] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116261, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.309598] env[62109]: DEBUG oslo_vmware.rw_handles [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52013a07-1e77-61d9-e1bd-92be4a32b290/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 744.310927] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd00c7f-b6ed-4366-92e5-46a51d2e4f92 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.321094] env[62109]: DEBUG oslo_vmware.rw_handles [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52013a07-1e77-61d9-e1bd-92be4a32b290/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 744.321398] env[62109]: ERROR oslo_vmware.rw_handles [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52013a07-1e77-61d9-e1bd-92be4a32b290/disk-0.vmdk due to incomplete transfer. [ 744.321664] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-35b0971c-c5d5-431d-85d4-89beb3da66a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.332136] env[62109]: DEBUG oslo_vmware.rw_handles [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52013a07-1e77-61d9-e1bd-92be4a32b290/disk-0.vmdk. 
{{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 744.332478] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Uploaded image b63b1f1b-1ccc-41fa-a8af-9e0f9cada2d3 to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 744.336182] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 744.336486] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-711c99dd-f2a0-4921-9ea8-fcdd550d5ac0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.348110] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 744.348110] env[62109]: value = "task-1116262" [ 744.348110] env[62109]: _type = "Task" [ 744.348110] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.360388] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116262, 'name': Destroy_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.378521] env[62109]: DEBUG nova.policy [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f86b6f2867544a798ba33119a633ef9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e7b925d134f743ab8b6d180a4e2b0fd5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 744.525923] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116258, 'name': ReconfigVM_Task, 'duration_secs': 0.39828} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.525923] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Reconfigured VM instance instance-00000028 to attach disk [datastore1] a9fb75d5-e303-4f31-888d-528963ab23b7/a9fb75d5-e303-4f31-888d-528963ab23b7.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 744.527338] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc2e0f20-c1b8-4ae5-bf20-7d3978aeadf6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.536960] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 744.536960] env[62109]: value = "task-1116263" [ 744.536960] env[62109]: _type = "Task" [ 744.536960] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.541303] env[62109]: DEBUG nova.network.neutron [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 744.551615] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116263, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.651722] env[62109]: DEBUG nova.compute.manager [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 744.733642] env[62109]: DEBUG nova.network.neutron [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Updating instance_info_cache with network_info: [{"id": "5a5722bc-d005-4ebd-8e52-08cbad2eb313", "address": "fa:16:3e:1e:28:fc", "network": {"id": "ee8cd43a-e9c7-4f99-8993-f38775d23fb1", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1376894308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eff7610abe6345049cbd1e2512f72e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a5722bc-d0", "ovs_interfaceid": "5a5722bc-d005-4ebd-8e52-08cbad2eb313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.750515] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116261, 'name': CreateVM_Task, 'duration_secs': 0.476787} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.752567] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 744.754483] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.754687] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.755050] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 744.755635] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f26055e1-a6e4-4bf1-b3b6-344414169ef6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.763414] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 744.763414] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d9c94e-92b6-95c3-d8e6-232486d1df9e" [ 744.763414] env[62109]: _type = "Task" [ 744.763414] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.777711] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d9c94e-92b6-95c3-d8e6-232486d1df9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.800909] env[62109]: DEBUG nova.compute.manager [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Received event network-changed-bb65c0b6-debe-49a6-a623-fc3778c5b9a8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 744.801136] env[62109]: DEBUG nova.compute.manager [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Refreshing instance network info cache due to event network-changed-bb65c0b6-debe-49a6-a623-fc3778c5b9a8. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 744.801377] env[62109]: DEBUG oslo_concurrency.lockutils [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] Acquiring lock "refresh_cache-028300fd-f9f8-4606-a39e-53582f830eeb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.801546] env[62109]: DEBUG oslo_concurrency.lockutils [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] Acquired lock "refresh_cache-028300fd-f9f8-4606-a39e-53582f830eeb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.801723] env[62109]: DEBUG nova.network.neutron [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Refreshing network info cache for port bb65c0b6-debe-49a6-a623-fc3778c5b9a8 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 744.858775] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116262, 'name': Destroy_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.986752] env[62109]: DEBUG nova.network.neutron [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Successfully created port: 374c820b-ad09-4e55-88f1-e117b8123aeb {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.051779] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116263, 'name': Rename_Task, 'duration_secs': 0.217891} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.052117] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 745.052473] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2bdadfb-06bb-4c37-a75e-93e69e8fc25b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.060418] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 745.060418] env[62109]: value = "task-1116264" [ 745.060418] env[62109]: _type = "Task" [ 745.060418] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.071957] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116264, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.108458] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "af3465db-fd56-458d-a499-14df3a0029f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.108693] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "af3465db-fd56-458d-a499-14df3a0029f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.159767] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2cc28d-4f77-463c-9134-e380171a661c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.168742] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62406a6-668d-47a5-912a-be4a8b7080aa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.204224] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337cdfab-1615-47d5-acab-a3b343cd6745 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.214741] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a42828-7b45-44d4-8d79-9819ec8ce8da {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.232199] env[62109]: DEBUG nova.compute.provider_tree [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.239894] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Releasing lock "refresh_cache-1aaa9eae-9183-49d7-a452-4345ad2a9aa0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.242739] env[62109]: DEBUG nova.compute.manager [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Instance network_info: |[{"id": 
"5a5722bc-d005-4ebd-8e52-08cbad2eb313", "address": "fa:16:3e:1e:28:fc", "network": {"id": "ee8cd43a-e9c7-4f99-8993-f38775d23fb1", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1376894308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eff7610abe6345049cbd1e2512f72e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a5722bc-d0", "ovs_interfaceid": "5a5722bc-d005-4ebd-8e52-08cbad2eb313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 745.243097] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:28:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a5722bc-d005-4ebd-8e52-08cbad2eb313', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 745.248854] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Creating folder: Project (eff7610abe6345049cbd1e2512f72e81). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 745.249558] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4b03eaa-ea35-4da4-b11f-94e13ab1eba6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.264659] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Created folder: Project (eff7610abe6345049cbd1e2512f72e81) in parent group-v244329. [ 745.265156] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Creating folder: Instances. Parent ref: group-v244378. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 745.270155] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfebc0f1-63f6-4d18-90c5-92557d2c8587 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.278892] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d9c94e-92b6-95c3-d8e6-232486d1df9e, 'name': SearchDatastore_Task, 'duration_secs': 0.014945} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.279267] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.279517] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 745.279767] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.279925] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.280125] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 745.280388] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-879e79a1-efd9-4fd2-b294-ea946de5e917 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.283888] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Created folder: Instances in parent group-v244378. 
[ 745.284249] env[62109]: DEBUG oslo.service.loopingcall [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 745.284878] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 745.285244] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-463401f5-f781-4243-ace7-f75795e2123a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.303537] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 745.303746] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 745.304793] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-390e26b5-77ce-4b29-8c96-ca8c6ae7a1dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.310593] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 745.310593] env[62109]: value = "task-1116267" [ 745.310593] env[62109]: _type = "Task" [ 745.310593] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.317276] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 745.317276] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523b563a-9c7c-201c-990c-97bc5a19497a" [ 745.317276] env[62109]: _type = "Task" [ 745.317276] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.324828] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116267, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.333637] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523b563a-9c7c-201c-990c-97bc5a19497a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.362134] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116262, 'name': Destroy_Task, 'duration_secs': 0.542365} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.362461] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Destroyed the VM [ 745.362804] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 745.363087] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dee0a4b1-66d6-47bf-bca7-98d13ee22381 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.370776] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 745.370776] env[62109]: value = "task-1116268" [ 745.370776] env[62109]: _type = "Task" [ 745.370776] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.379292] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116268, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.547529] env[62109]: DEBUG nova.network.neutron [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Updated VIF entry in instance network info cache for port bb65c0b6-debe-49a6-a623-fc3778c5b9a8. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 745.547529] env[62109]: DEBUG nova.network.neutron [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Updating instance_info_cache with network_info: [{"id": "bb65c0b6-debe-49a6-a623-fc3778c5b9a8", "address": "fa:16:3e:d7:01:2c", "network": {"id": "35f9f3e9-a618-43e3-ac76-30778c802026", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-152247776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb674b9896af406aad0bc08bb8a63c72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91b0f7e5-0d1a-46e2-bf73-09656211dea2", "external-id": "nsx-vlan-transportzone-488", "segmentation_id": 488, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb65c0b6-de", "ovs_interfaceid": "bb65c0b6-debe-49a6-a623-fc3778c5b9a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.573242] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116264, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.665348] env[62109]: DEBUG nova.compute.manager [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 745.692841] env[62109]: DEBUG nova.virt.hardware [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 745.693235] env[62109]: DEBUG nova.virt.hardware [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 745.693337] env[62109]: DEBUG nova.virt.hardware [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.693487] env[62109]: DEBUG nova.virt.hardware [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 745.693646] env[62109]: DEBUG nova.virt.hardware [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.693800] env[62109]: DEBUG nova.virt.hardware [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 745.694024] env[62109]: DEBUG nova.virt.hardware [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 745.694196] env[62109]: DEBUG nova.virt.hardware [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 745.694395] env[62109]: DEBUG nova.virt.hardware [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 745.694580] env[62109]: DEBUG nova.virt.hardware [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 745.694763] env[62109]: DEBUG nova.virt.hardware [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 745.695661] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e55b86-ce36-4517-bcb2-a91f23f37397 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.704888] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b8f3eb-8183-4fe6-8294-95dbdd5aff9f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.723819] env[62109]: DEBUG nova.compute.manager [req-b6f35279-9f95-409f-9d79-7b0533c8f9b9 req-528fa84f-919d-477c-924d-f27ff5628947 service nova] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Received event network-changed-5a5722bc-d005-4ebd-8e52-08cbad2eb313 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 745.724045] env[62109]: DEBUG nova.compute.manager [req-b6f35279-9f95-409f-9d79-7b0533c8f9b9 req-528fa84f-919d-477c-924d-f27ff5628947 service nova] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Refreshing instance network info cache due to event network-changed-5a5722bc-d005-4ebd-8e52-08cbad2eb313. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 745.724329] env[62109]: DEBUG oslo_concurrency.lockutils [req-b6f35279-9f95-409f-9d79-7b0533c8f9b9 req-528fa84f-919d-477c-924d-f27ff5628947 service nova] Acquiring lock "refresh_cache-1aaa9eae-9183-49d7-a452-4345ad2a9aa0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.724516] env[62109]: DEBUG oslo_concurrency.lockutils [req-b6f35279-9f95-409f-9d79-7b0533c8f9b9 req-528fa84f-919d-477c-924d-f27ff5628947 service nova] Acquired lock "refresh_cache-1aaa9eae-9183-49d7-a452-4345ad2a9aa0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.724650] env[62109]: DEBUG nova.network.neutron [req-b6f35279-9f95-409f-9d79-7b0533c8f9b9 req-528fa84f-919d-477c-924d-f27ff5628947 service nova] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Refreshing network info cache for port 5a5722bc-d005-4ebd-8e52-08cbad2eb313 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 745.735494] env[62109]: DEBUG nova.scheduler.client.report [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 745.825900] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116267, 'name': CreateVM_Task, 'duration_secs': 0.475037} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.826770] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 745.827350] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.827565] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.827849] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 745.828198] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-836fd162-76c2-4995-add5-4ac02f346f61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.834236] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523b563a-9c7c-201c-990c-97bc5a19497a, 'name': SearchDatastore_Task, 'duration_secs': 0.01641} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.836141] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Waiting for the task: (returnval){ [ 745.836141] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525f86f3-1353-5663-7bfa-94e5076f43a8" [ 745.836141] env[62109]: _type = "Task" [ 745.836141] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.836353] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d897c8d4-4c32-481f-9739-af5f900bbe85 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.848499] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525f86f3-1353-5663-7bfa-94e5076f43a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.849741] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 745.849741] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529f7c0f-7bea-ff31-db99-edb17565b5ae" [ 745.849741] env[62109]: _type = "Task" [ 745.849741] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.860940] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529f7c0f-7bea-ff31-db99-edb17565b5ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.883027] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116268, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.051161] env[62109]: DEBUG oslo_concurrency.lockutils [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] Releasing lock "refresh_cache-028300fd-f9f8-4606-a39e-53582f830eeb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.051512] env[62109]: DEBUG nova.compute.manager [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Received event network-changed-76f15b7e-4103-4568-8042-248ee15513dc {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 746.052202] env[62109]: DEBUG nova.compute.manager [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Refreshing instance network info cache due to event network-changed-76f15b7e-4103-4568-8042-248ee15513dc. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 746.052202] env[62109]: DEBUG oslo_concurrency.lockutils [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] Acquiring lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.052202] env[62109]: DEBUG oslo_concurrency.lockutils [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] Acquired lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.052384] env[62109]: DEBUG nova.network.neutron [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Refreshing network info cache for port 76f15b7e-4103-4568-8042-248ee15513dc {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 746.074188] env[62109]: DEBUG oslo_vmware.api [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116264, 'name': PowerOnVM_Task, 'duration_secs': 0.587584} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.074624] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 746.078018] env[62109]: INFO nova.compute.manager [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Took 8.14 seconds to spawn the instance on the hypervisor. [ 746.078018] env[62109]: DEBUG nova.compute.manager [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 746.078018] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912f826e-ac8a-4179-9c01-fcf91cb1d803 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.243168] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.243168] env[62109]: DEBUG nova.compute.manager [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 746.244136] env[62109]: DEBUG oslo_concurrency.lockutils [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.830s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.244585] env[62109]: DEBUG nova.objects.instance [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lazy-loading 'resources' on Instance uuid 17ee49a9-d980-46c0-996e-6a43c80be434 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 746.349639] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525f86f3-1353-5663-7bfa-94e5076f43a8, 'name': SearchDatastore_Task, 'duration_secs': 0.020045} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.349958] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.350210] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 746.350427] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.361216] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529f7c0f-7bea-ff31-db99-edb17565b5ae, 'name': SearchDatastore_Task, 'duration_secs': 0.020613} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.361480] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.361741] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 028300fd-f9f8-4606-a39e-53582f830eeb/028300fd-f9f8-4606-a39e-53582f830eeb.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 746.362029] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.362868] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 746.362868] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1adbb7b4-43df-419e-9b0b-170150ec6d10 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.364756] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2a9c67b-153d-4b9a-b25b-2404c41caac9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.371872] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 746.371872] env[62109]: value = "task-1116269" [ 746.371872] env[62109]: _type = "Task" [ 746.371872] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.382311] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 746.382548] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 746.383582] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb47ee8e-c1a0-482a-978e-f5001a3ca3ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.391709] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116268, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.391951] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116269, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.395916] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Waiting for the task: (returnval){ [ 746.395916] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52503f60-5e29-0ae2-ed7f-938fae86f9f7" [ 746.395916] env[62109]: _type = "Task" [ 746.395916] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.403938] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52503f60-5e29-0ae2-ed7f-938fae86f9f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.449977] env[62109]: DEBUG nova.network.neutron [req-b6f35279-9f95-409f-9d79-7b0533c8f9b9 req-528fa84f-919d-477c-924d-f27ff5628947 service nova] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Updated VIF entry in instance network info cache for port 5a5722bc-d005-4ebd-8e52-08cbad2eb313. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 746.450376] env[62109]: DEBUG nova.network.neutron [req-b6f35279-9f95-409f-9d79-7b0533c8f9b9 req-528fa84f-919d-477c-924d-f27ff5628947 service nova] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Updating instance_info_cache with network_info: [{"id": "5a5722bc-d005-4ebd-8e52-08cbad2eb313", "address": "fa:16:3e:1e:28:fc", "network": {"id": "ee8cd43a-e9c7-4f99-8993-f38775d23fb1", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1376894308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eff7610abe6345049cbd1e2512f72e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a5722bc-d0", "ovs_interfaceid": "5a5722bc-d005-4ebd-8e52-08cbad2eb313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.605403] env[62109]: INFO nova.compute.manager [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Took 28.39 seconds to build instance. [ 746.749199] env[62109]: DEBUG nova.compute.utils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 746.753067] env[62109]: DEBUG nova.compute.manager [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 746.753292] env[62109]: DEBUG nova.network.neutron [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 746.824285] env[62109]: DEBUG nova.policy [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '575179807a0c4e9ca0c0a289170aaae7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '49e293771a024d8d843ebb4c094722ff', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 746.884338] env[62109]: DEBUG nova.network.neutron [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Updated VIF entry in instance network info cache for port 76f15b7e-4103-4568-8042-248ee15513dc. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 746.884338] env[62109]: DEBUG nova.network.neutron [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Updating instance_info_cache with network_info: [{"id": "76f15b7e-4103-4568-8042-248ee15513dc", "address": "fa:16:3e:81:0c:19", "network": {"id": "66a020c3-cdbc-464e-83aa-02e9126e8492", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1240081161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bff923ccb02449aa834523a0652cbdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f15b7e-41", "ovs_interfaceid": "76f15b7e-4103-4568-8042-248ee15513dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.899457] env[62109]: DEBUG oslo_vmware.api [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116268, 'name': RemoveSnapshot_Task, 'duration_secs': 1.259861} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.909604] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 746.911122] env[62109]: INFO nova.compute.manager [None req-45acd301-d04b-412e-94f0-d224a35aefb2 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Took 15.48 seconds to snapshot the instance on the hypervisor. [ 746.914543] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116269, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.926373] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52503f60-5e29-0ae2-ed7f-938fae86f9f7, 'name': SearchDatastore_Task, 'duration_secs': 0.01262} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.929510] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36f2817d-8694-41f6-aafa-95f34d3e7ff2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.948382] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Waiting for the task: (returnval){ [ 746.948382] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5235d989-3a92-1044-47d9-cd585f3848b4" [ 746.948382] env[62109]: _type = "Task" [ 746.948382] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.954463] env[62109]: DEBUG oslo_concurrency.lockutils [req-b6f35279-9f95-409f-9d79-7b0533c8f9b9 req-528fa84f-919d-477c-924d-f27ff5628947 service nova] Releasing lock "refresh_cache-1aaa9eae-9183-49d7-a452-4345ad2a9aa0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.966308] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5235d989-3a92-1044-47d9-cd585f3848b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.970179] env[62109]: DEBUG nova.network.neutron [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Successfully updated port: 374c820b-ad09-4e55-88f1-e117b8123aeb {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 747.027754] env[62109]: DEBUG nova.compute.manager [req-8ee1eb05-2344-4baa-89f2-c2cdcefc6835 req-121d9614-12be-47d2-a94f-85b5554711e5 service nova] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Received event network-vif-plugged-374c820b-ad09-4e55-88f1-e117b8123aeb {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 747.027980] env[62109]: DEBUG oslo_concurrency.lockutils [req-8ee1eb05-2344-4baa-89f2-c2cdcefc6835 req-121d9614-12be-47d2-a94f-85b5554711e5 service nova] Acquiring lock "8584eb2c-57a3-455e-9d3c-877286e23ccc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.028400] env[62109]: DEBUG oslo_concurrency.lockutils [req-8ee1eb05-2344-4baa-89f2-c2cdcefc6835 req-121d9614-12be-47d2-a94f-85b5554711e5 service nova] Lock "8584eb2c-57a3-455e-9d3c-877286e23ccc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.028599] env[62109]: DEBUG oslo_concurrency.lockutils [req-8ee1eb05-2344-4baa-89f2-c2cdcefc6835 req-121d9614-12be-47d2-a94f-85b5554711e5 service nova] Lock "8584eb2c-57a3-455e-9d3c-877286e23ccc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.028785] env[62109]: DEBUG nova.compute.manager [req-8ee1eb05-2344-4baa-89f2-c2cdcefc6835 req-121d9614-12be-47d2-a94f-85b5554711e5 service nova] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] No waiting events found dispatching network-vif-plugged-374c820b-ad09-4e55-88f1-e117b8123aeb {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 747.029434] env[62109]: WARNING nova.compute.manager [req-8ee1eb05-2344-4baa-89f2-c2cdcefc6835 req-121d9614-12be-47d2-a94f-85b5554711e5 service nova] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Received unexpected event network-vif-plugged-374c820b-ad09-4e55-88f1-e117b8123aeb for instance with vm_state building and task_state spawning. 
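Editor's note: the repeated "Acquiring lock" / "acquired" / "released" DEBUG lines in this stretch (the per-instance "-events" lock and the "refresh_cache-<uuid>" locks) are emitted by oslo.concurrency's lockutils helpers. The following is an illustrative sketch only, not code taken from Nova; the lock names and function names are hypothetical stand-ins echoing the identifiers in the log.

from oslo_concurrency import lockutils

# Decorator form: lockutils logs "acquired ... :: waited Ns" and
# '"released" ... :: held Ns' around the wrapped call, which is the pattern
# the "-events" lock lines above show.
@lockutils.synchronized('8584eb2c-57a3-455e-9d3c-877286e23ccc-events')
def pop_instance_event():
    pass  # critical section: pop any waiting event for this instance

# Context-manager form, matching the "refresh_cache-<uuid>" acquire/release pairs.
def refresh_network_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network info cache while holding the lock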
[ 747.107744] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0278dc5f-bc07-4153-be4e-1ef7c1853659 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "a9fb75d5-e303-4f31-888d-528963ab23b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 144.894s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.242717] env[62109]: DEBUG nova.network.neutron [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Successfully created port: 28469078-1559-4ee4-93a9-9165165a7b4c {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.253869] env[62109]: DEBUG nova.compute.manager [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 747.298164] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf885a65-4a68-4ee4-9f7f-d70392c19513 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.309018] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbaeb06-f59e-44bf-9d46-58941c2d9588 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.345396] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea5b309-6b9d-4194-979e-e416f277e9a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.354494] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2dd24ee-f677-40ce-b359-772f9f9eed28 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.369194] env[62109]: DEBUG nova.compute.provider_tree [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.386419] env[62109]: DEBUG oslo_concurrency.lockutils [req-4ac0d5fd-7b5e-4b16-a254-169a511b29c7 req-43108334-12f1-410c-83a2-47d197402f26 service nova] Releasing lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.396784] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116269, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.873876} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.397810] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 028300fd-f9f8-4606-a39e-53582f830eeb/028300fd-f9f8-4606-a39e-53582f830eeb.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 747.397810] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 747.397810] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9eebf25f-2335-4539-83be-122285c9bc7c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.405214] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 747.405214] env[62109]: value = "task-1116270" [ 747.405214] env[62109]: _type = "Task" [ 747.405214] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.418035] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116270, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.462446] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5235d989-3a92-1044-47d9-cd585f3848b4, 'name': SearchDatastore_Task, 'duration_secs': 0.061208} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.462725] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.462991] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 1aaa9eae-9183-49d7-a452-4345ad2a9aa0/1aaa9eae-9183-49d7-a452-4345ad2a9aa0.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 747.463302] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b06768b-7e2b-4f95-b42f-793e59bd6c30 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.471741] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Waiting for the task: (returnval){ [ 747.471741] env[62109]: value = "task-1116271" [ 747.471741] env[62109]: _type = "Task" [ 747.471741] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.475505] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Acquiring lock "refresh_cache-8584eb2c-57a3-455e-9d3c-877286e23ccc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.475681] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Acquired lock "refresh_cache-8584eb2c-57a3-455e-9d3c-877286e23ccc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.475841] env[62109]: DEBUG nova.network.neutron [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 747.482976] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116271, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.614922] env[62109]: DEBUG nova.compute.manager [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 747.872089] env[62109]: DEBUG nova.scheduler.client.report [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 747.917660] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116270, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070403} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.917660] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 747.917660] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94526741-0a4b-478e-b206-650cdde6f9b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.941436] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 028300fd-f9f8-4606-a39e-53582f830eeb/028300fd-f9f8-4606-a39e-53582f830eeb.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 747.941753] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d75bb728-62a0-4510-ba74-cef3bc1a35d4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.957375] env[62109]: DEBUG nova.compute.manager [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 747.958228] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff369de2-04c5-4349-adf8-105fabae8260 
{{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.970920] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 747.970920] env[62109]: value = "task-1116272" [ 747.970920] env[62109]: _type = "Task" [ 747.970920] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.985835] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116272, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.986123] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116271, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.017620] env[62109]: DEBUG nova.network.neutron [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.136147] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.187274] env[62109]: DEBUG nova.network.neutron [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Updating instance_info_cache with network_info: [{"id": "374c820b-ad09-4e55-88f1-e117b8123aeb", "address": "fa:16:3e:86:25:46", "network": {"id": "2221b52a-4c60-47a9-b8b0-f8285941b40b", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-503852520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7b925d134f743ab8b6d180a4e2b0fd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374c820b-ad", "ovs_interfaceid": "374c820b-ad09-4e55-88f1-e117b8123aeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.265959] env[62109]: DEBUG nova.compute.manager [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 748.295105] env[62109]: DEBUG nova.virt.hardware [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 748.295419] env[62109]: DEBUG nova.virt.hardware [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 748.295701] env[62109]: DEBUG nova.virt.hardware [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.295921] env[62109]: DEBUG nova.virt.hardware [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 748.296104] env[62109]: DEBUG nova.virt.hardware [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.296277] env[62109]: DEBUG nova.virt.hardware [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 748.296503] env[62109]: DEBUG nova.virt.hardware [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 748.296811] env[62109]: DEBUG nova.virt.hardware 
[None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 748.297027] env[62109]: DEBUG nova.virt.hardware [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 748.297224] env[62109]: DEBUG nova.virt.hardware [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 748.297415] env[62109]: DEBUG nova.virt.hardware [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 748.298356] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aae6b6f-3438-4354-a981-2f4c08444218 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.309483] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42667795-d589-4fc8-8dff-c74eaf622b37 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.377522] env[62109]: DEBUG oslo_concurrency.lockutils [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.133s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.380373] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.805s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.380616] env[62109]: DEBUG nova.objects.instance [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lazy-loading 'resources' on Instance uuid 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 748.411247] env[62109]: INFO nova.scheduler.client.report [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Deleted allocations for instance 17ee49a9-d980-46c0-996e-6a43c80be434 [ 748.475183] env[62109]: INFO nova.compute.manager [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] 
instance snapshotting [ 748.483179] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033e07b9-5d96-4a48-9d47-9fb7e7b9b947 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.501736] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116272, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.520984] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116271, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.523266] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2055ae71-c4ff-47e0-9b89-cc8ee1f61cf4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.690576] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Releasing lock "refresh_cache-8584eb2c-57a3-455e-9d3c-877286e23ccc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.691628] env[62109]: DEBUG nova.compute.manager [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Instance network_info: |[{"id": "374c820b-ad09-4e55-88f1-e117b8123aeb", "address": "fa:16:3e:86:25:46", "network": {"id": "2221b52a-4c60-47a9-b8b0-f8285941b40b", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-503852520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7b925d134f743ab8b6d180a4e2b0fd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374c820b-ad", "ovs_interfaceid": "374c820b-ad09-4e55-88f1-e117b8123aeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 748.692783] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:25:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '374c820b-ad09-4e55-88f1-e117b8123aeb', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.699382] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Creating folder: Project (e7b925d134f743ab8b6d180a4e2b0fd5). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 748.699686] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-658aacc4-66bd-4c03-93e3-e52d0e9885e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.712304] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Created folder: Project (e7b925d134f743ab8b6d180a4e2b0fd5) in parent group-v244329. [ 748.712395] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Creating folder: Instances. Parent ref: group-v244381. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 748.712647] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98d0f849-fec9-45f8-afaa-c9158a2e76ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.725065] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Created folder: Instances in parent group-v244381. [ 748.725065] env[62109]: DEBUG oslo.service.loopingcall [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 748.725065] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 748.725065] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36ae373d-d0b3-4cb5-be93-28e3c8ace123 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.748663] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 748.748663] env[62109]: value = "task-1116275" [ 748.748663] env[62109]: _type = "Task" [ 748.748663] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.756505] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116275, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.920729] env[62109]: DEBUG oslo_concurrency.lockutils [None req-72a06dcb-72aa-4882-9037-cfa39d6ec30a tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "17ee49a9-d980-46c0-996e-6a43c80be434" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.544s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.931920] env[62109]: DEBUG nova.compute.manager [req-4f7831d5-87c1-4140-917f-a94f21525064 req-ebae8dcd-2448-4c50-86e1-3475e695f5b1 service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Received event network-vif-plugged-28469078-1559-4ee4-93a9-9165165a7b4c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 748.932143] env[62109]: DEBUG oslo_concurrency.lockutils [req-4f7831d5-87c1-4140-917f-a94f21525064 req-ebae8dcd-2448-4c50-86e1-3475e695f5b1 service nova] Acquiring lock "8b6ec904-8c68-4eaa-94fe-47a87528e26b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.932345] env[62109]: DEBUG oslo_concurrency.lockutils [req-4f7831d5-87c1-4140-917f-a94f21525064 req-ebae8dcd-2448-4c50-86e1-3475e695f5b1 service nova] Lock "8b6ec904-8c68-4eaa-94fe-47a87528e26b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.932508] env[62109]: DEBUG oslo_concurrency.lockutils [req-4f7831d5-87c1-4140-917f-a94f21525064 req-ebae8dcd-2448-4c50-86e1-3475e695f5b1 service nova] Lock "8b6ec904-8c68-4eaa-94fe-47a87528e26b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.932684] env[62109]: DEBUG nova.compute.manager [req-4f7831d5-87c1-4140-917f-a94f21525064 req-ebae8dcd-2448-4c50-86e1-3475e695f5b1 service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] No waiting events found dispatching network-vif-plugged-28469078-1559-4ee4-93a9-9165165a7b4c {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 748.932848] env[62109]: WARNING nova.compute.manager [req-4f7831d5-87c1-4140-917f-a94f21525064 req-ebae8dcd-2448-4c50-86e1-3475e695f5b1 service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Received unexpected event network-vif-plugged-28469078-1559-4ee4-93a9-9165165a7b4c for instance with vm_state building and task_state spawning. [ 748.991422] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116271, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.104272} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.996857] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 1aaa9eae-9183-49d7-a452-4345ad2a9aa0/1aaa9eae-9183-49d7-a452-4345ad2a9aa0.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 748.997126] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 748.999706] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116272, 'name': ReconfigVM_Task, 'duration_secs': 0.669319} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.000248] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07ce4a7c-e4a3-4e2e-b6e0-2681e50fcf2e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.007331] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 028300fd-f9f8-4606-a39e-53582f830eeb/028300fd-f9f8-4606-a39e-53582f830eeb.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 749.007961] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dbfda6d1-f227-43d1-9d24-dc0ebfccabb3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.017196] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 749.017196] env[62109]: value = "task-1116276" [ 749.017196] env[62109]: _type = "Task" [ 749.017196] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.021581] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Waiting for the task: (returnval){ [ 749.021581] env[62109]: value = "task-1116277" [ 749.021581] env[62109]: _type = "Task" [ 749.021581] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.034823] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116276, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.040417] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 749.040417] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116277, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.041304] env[62109]: DEBUG nova.network.neutron [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Successfully updated port: 28469078-1559-4ee4-93a9-9165165a7b4c {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 749.042032] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-56846bc4-4381-4775-b35a-a535b3063dff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.058329] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 749.058329] env[62109]: value = "task-1116278" [ 749.058329] env[62109]: _type = "Task" [ 749.058329] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.071706] env[62109]: DEBUG nova.compute.manager [req-ccd2e235-be98-4db1-86ef-883e1d2dbb00 req-2758050d-340f-4bd2-a0c9-5b3f2d11f9c0 service nova] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Received event network-changed-374c820b-ad09-4e55-88f1-e117b8123aeb {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 749.071995] env[62109]: DEBUG nova.compute.manager [req-ccd2e235-be98-4db1-86ef-883e1d2dbb00 req-2758050d-340f-4bd2-a0c9-5b3f2d11f9c0 service nova] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Refreshing instance network info cache due to event network-changed-374c820b-ad09-4e55-88f1-e117b8123aeb. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 749.072307] env[62109]: DEBUG oslo_concurrency.lockutils [req-ccd2e235-be98-4db1-86ef-883e1d2dbb00 req-2758050d-340f-4bd2-a0c9-5b3f2d11f9c0 service nova] Acquiring lock "refresh_cache-8584eb2c-57a3-455e-9d3c-877286e23ccc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.072532] env[62109]: DEBUG oslo_concurrency.lockutils [req-ccd2e235-be98-4db1-86ef-883e1d2dbb00 req-2758050d-340f-4bd2-a0c9-5b3f2d11f9c0 service nova] Acquired lock "refresh_cache-8584eb2c-57a3-455e-9d3c-877286e23ccc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.072870] env[62109]: DEBUG nova.network.neutron [req-ccd2e235-be98-4db1-86ef-883e1d2dbb00 req-2758050d-340f-4bd2-a0c9-5b3f2d11f9c0 service nova] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Refreshing network info cache for port 374c820b-ad09-4e55-88f1-e117b8123aeb {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 749.078055] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116278, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.260837] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116275, 'name': CreateVM_Task} progress is 25%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.342184] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6f27fc-4184-498c-8d41-94febc9ed39f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.351260] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa59f38-89a9-43aa-b4d5-a6eba5cfb2be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.386914] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75b8f25-266c-4e3b-b47d-18c5049ba182 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.397867] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97123b0b-dff8-42dd-a0dd-3aa65e807c1f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.411546] env[62109]: DEBUG nova.compute.provider_tree [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.502324] env[62109]: DEBUG nova.compute.manager [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 749.503332] env[62109]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2499e5e-d55c-4a62-bfa1-eb9160b1ab9b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.527063] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116276, 'name': Rename_Task, 'duration_secs': 0.285991} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.530417] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 749.530670] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6f41b19-2ab0-4076-8eae-c539584767fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.538940] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116277, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080408} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.540357] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 749.541125] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 749.541125] env[62109]: value = "task-1116279" [ 749.541125] env[62109]: _type = "Task" [ 749.541125] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.541617] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc47ab72-23d1-4fcf-a97b-27c13ba2f022 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.548470] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Acquiring lock "refresh_cache-8b6ec904-8c68-4eaa-94fe-47a87528e26b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.548709] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Acquired lock "refresh_cache-8b6ec904-8c68-4eaa-94fe-47a87528e26b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.548880] env[62109]: DEBUG nova.network.neutron [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 749.572108] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 1aaa9eae-9183-49d7-a452-4345ad2a9aa0/1aaa9eae-9183-49d7-a452-4345ad2a9aa0.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 749.579931] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e1ad750-065b-46bd-a5d2-5e9f930d0026 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.595156] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116279, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.607278] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116278, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.609500] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Waiting for the task: (returnval){ [ 749.609500] env[62109]: value = "task-1116280" [ 749.609500] env[62109]: _type = "Task" [ 749.609500] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.620485] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116280, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.761045] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116275, 'name': CreateVM_Task, 'duration_secs': 0.704277} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.761045] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 749.761617] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.761671] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.762109] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 749.763183] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6d4653c-9dec-47f7-ab04-6745cfaf1c8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.769131] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Waiting for the task: (returnval){ [ 749.769131] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52da00a5-b77c-6a0d-cea6-bc637b585303" [ 749.769131] env[62109]: _type = "Task" [ 749.769131] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.780121] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52da00a5-b77c-6a0d-cea6-bc637b585303, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.834490] env[62109]: DEBUG nova.network.neutron [req-ccd2e235-be98-4db1-86ef-883e1d2dbb00 req-2758050d-340f-4bd2-a0c9-5b3f2d11f9c0 service nova] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Updated VIF entry in instance network info cache for port 374c820b-ad09-4e55-88f1-e117b8123aeb. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 749.834913] env[62109]: DEBUG nova.network.neutron [req-ccd2e235-be98-4db1-86ef-883e1d2dbb00 req-2758050d-340f-4bd2-a0c9-5b3f2d11f9c0 service nova] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Updating instance_info_cache with network_info: [{"id": "374c820b-ad09-4e55-88f1-e117b8123aeb", "address": "fa:16:3e:86:25:46", "network": {"id": "2221b52a-4c60-47a9-b8b0-f8285941b40b", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-503852520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7b925d134f743ab8b6d180a4e2b0fd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374c820b-ad", "ovs_interfaceid": "374c820b-ad09-4e55-88f1-e117b8123aeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.914427] env[62109]: DEBUG nova.scheduler.client.report [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 750.016955] env[62109]: INFO nova.compute.manager [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] instance snapshotting [ 750.019859] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d20e38-5e9e-49cd-8465-05691f17cadb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.042641] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e778e4-38ae-4f4c-8ae5-68746f088f8f {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.058768] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116279, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.076541] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116278, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.091612] env[62109]: DEBUG nova.network.neutron [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 750.120749] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116280, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.253173] env[62109]: DEBUG nova.network.neutron [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Updating instance_info_cache with network_info: [{"id": "28469078-1559-4ee4-93a9-9165165a7b4c", "address": "fa:16:3e:67:49:92", "network": {"id": "673cf969-ae87-4e20-a288-4363e937b462", "bridge": "br-int", "label": "tempest-ServersTestJSON-426005598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "49e293771a024d8d843ebb4c094722ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28469078-15", "ovs_interfaceid": "28469078-1559-4ee4-93a9-9165165a7b4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.280766] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52da00a5-b77c-6a0d-cea6-bc637b585303, 'name': SearchDatastore_Task, 'duration_secs': 0.029165} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.281372] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.281617] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 750.281860] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.282024] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.282213] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 750.282485] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f9cc8e6-413f-4c33-aa01-3163bff23aae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.291807] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 750.292013] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 750.292828] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5915bd9-e3b3-4683-8b0c-afbfe6b41f4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.298758] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Waiting for the task: (returnval){ [ 750.298758] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5217ad8f-d07c-d055-98cf-a15ad07f1976" [ 750.298758] env[62109]: _type = "Task" [ 750.298758] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.308415] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5217ad8f-d07c-d055-98cf-a15ad07f1976, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.338472] env[62109]: DEBUG oslo_concurrency.lockutils [req-ccd2e235-be98-4db1-86ef-883e1d2dbb00 req-2758050d-340f-4bd2-a0c9-5b3f2d11f9c0 service nova] Releasing lock "refresh_cache-8584eb2c-57a3-455e-9d3c-877286e23ccc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.420993] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.041s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.423868] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.545s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.423868] env[62109]: DEBUG nova.objects.instance [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 750.443763] env[62109]: INFO nova.scheduler.client.report [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Deleted allocations for instance 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371 [ 750.556429] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116279, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.562157] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 750.562523] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c4c71c70-de73-4c0b-bc80-37864f1a1763 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.574220] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116278, 'name': CreateSnapshot_Task, 'duration_secs': 1.279892} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.575490] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 750.575829] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 750.575829] env[62109]: value = "task-1116281" [ 750.575829] env[62109]: _type = "Task" [ 750.575829] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.577035] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391a96eb-ec7a-447c-beb9-2ef245c1ff83 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.592294] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116281, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.620515] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116280, 'name': ReconfigVM_Task, 'duration_secs': 0.617771} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.621519] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 1aaa9eae-9183-49d7-a452-4345ad2a9aa0/1aaa9eae-9183-49d7-a452-4345ad2a9aa0.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 750.622175] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6af63497-78b7-4c35-a174-d6d11a7b37a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.630565] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Waiting for the task: (returnval){ [ 750.630565] env[62109]: value = "task-1116282" [ 750.630565] env[62109]: _type = "Task" [ 750.630565] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.644611] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116282, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.757019] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Releasing lock "refresh_cache-8b6ec904-8c68-4eaa-94fe-47a87528e26b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.757019] env[62109]: DEBUG nova.compute.manager [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Instance network_info: |[{"id": "28469078-1559-4ee4-93a9-9165165a7b4c", "address": "fa:16:3e:67:49:92", "network": {"id": "673cf969-ae87-4e20-a288-4363e937b462", "bridge": "br-int", "label": "tempest-ServersTestJSON-426005598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "49e293771a024d8d843ebb4c094722ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28469078-15", "ovs_interfaceid": "28469078-1559-4ee4-93a9-9165165a7b4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 750.757217] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:49:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '750b5f9b-f78a-4650-9153-c5bb117e507c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28469078-1559-4ee4-93a9-9165165a7b4c', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 750.764438] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Creating folder: Project (49e293771a024d8d843ebb4c094722ff). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 750.765115] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd41df55-e6a7-4af0-a360-58bef7ebaace {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.776979] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Created folder: Project (49e293771a024d8d843ebb4c094722ff) in parent group-v244329. [ 750.778029] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Creating folder: Instances. Parent ref: group-v244385. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 750.778029] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1fbc0d13-b414-48dc-910f-2404186236a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.789374] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Created folder: Instances in parent group-v244385. [ 750.790033] env[62109]: DEBUG oslo.service.loopingcall [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.790033] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 750.790355] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e46525f-3f3a-46d3-88a5-8e36e79ace1e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.815692] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5217ad8f-d07c-d055-98cf-a15ad07f1976, 'name': SearchDatastore_Task, 'duration_secs': 0.028958} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.818246] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.818246] env[62109]: value = "task-1116285" [ 750.818246] env[62109]: _type = "Task" [ 750.818246] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.818661] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d949a13-0a01-4194-8e86-c19f614a0db8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.827468] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Waiting for the task: (returnval){ [ 750.827468] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523ae45b-9f60-2976-b804-61440039f352" [ 750.827468] env[62109]: _type = "Task" [ 750.827468] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.830663] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116285, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.840833] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523ae45b-9f60-2976-b804-61440039f352, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.841170] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.841444] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 8584eb2c-57a3-455e-9d3c-877286e23ccc/8584eb2c-57a3-455e-9d3c-877286e23ccc.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 750.841896] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e56ac916-6a05-4a04-8c1d-6ece7f05fd29 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.850989] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Waiting for the task: (returnval){ [ 750.850989] env[62109]: value = "task-1116286" [ 750.850989] env[62109]: _type = "Task" [ 750.850989] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.859451] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116286, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.954938] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc9146f6-aa5b-4b17-938f-27682d6cf7c7 tempest-ListServersNegativeTestJSON-364914635 tempest-ListServersNegativeTestJSON-364914635-project-member] Lock "46aa78cc-ea0a-4c1b-aadb-f2a4856c9371" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.454s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.058110] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116279, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.080441] env[62109]: DEBUG nova.compute.manager [req-e0a34db0-6034-4cb9-b86e-1b94c3a1745c req-d18687bf-670c-466b-ae13-63edd54a8bc2 service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Received event network-changed-28469078-1559-4ee4-93a9-9165165a7b4c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 751.080697] env[62109]: DEBUG nova.compute.manager [req-e0a34db0-6034-4cb9-b86e-1b94c3a1745c req-d18687bf-670c-466b-ae13-63edd54a8bc2 service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Refreshing instance network info cache due to event network-changed-28469078-1559-4ee4-93a9-9165165a7b4c. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 751.080997] env[62109]: DEBUG oslo_concurrency.lockutils [req-e0a34db0-6034-4cb9-b86e-1b94c3a1745c req-d18687bf-670c-466b-ae13-63edd54a8bc2 service nova] Acquiring lock "refresh_cache-8b6ec904-8c68-4eaa-94fe-47a87528e26b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.081270] env[62109]: DEBUG oslo_concurrency.lockutils [req-e0a34db0-6034-4cb9-b86e-1b94c3a1745c req-d18687bf-670c-466b-ae13-63edd54a8bc2 service nova] Acquired lock "refresh_cache-8b6ec904-8c68-4eaa-94fe-47a87528e26b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.081505] env[62109]: DEBUG nova.network.neutron [req-e0a34db0-6034-4cb9-b86e-1b94c3a1745c req-d18687bf-670c-466b-ae13-63edd54a8bc2 service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Refreshing network info cache for port 28469078-1559-4ee4-93a9-9165165a7b4c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 751.103603] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 751.104134] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116281, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.108975] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cdfd7cd6-e330-4495-b9c9-05a84bf5902c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.125129] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 751.125129] env[62109]: value = "task-1116287" [ 751.125129] env[62109]: _type = "Task" [ 751.125129] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.139623] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116287, 'name': CloneVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.147230] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116282, 'name': Rename_Task, 'duration_secs': 0.292452} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.147646] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 751.147976] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab8663f3-07e8-464c-8c53-263fd0380b38 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.157416] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Waiting for the task: (returnval){ [ 751.157416] env[62109]: value = "task-1116288" [ 751.157416] env[62109]: _type = "Task" [ 751.157416] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.173130] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116288, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.335344] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116285, 'name': CreateVM_Task, 'duration_secs': 0.431757} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.335344] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 751.335717] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.335717] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.336059] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 751.337394] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bf618d4-da29-4af0-b576-f325d87517a4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.344028] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Waiting for the task: (returnval){ [ 751.344028] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527b9f7f-8c10-3a24-5e54-319c96c5849e" [ 751.344028] env[62109]: _type = "Task" [ 751.344028] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.357372] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527b9f7f-8c10-3a24-5e54-319c96c5849e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.368227] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116286, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.433782] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95c5b623-337d-4e0f-95a8-c34906b5ec75 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.435142] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.079s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.440791] env[62109]: INFO nova.compute.claims [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 751.557756] env[62109]: DEBUG oslo_vmware.api [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116279, 'name': PowerOnVM_Task, 'duration_secs': 1.821539} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.557939] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 751.558182] env[62109]: INFO nova.compute.manager [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Took 10.86 seconds to spawn the instance on the hypervisor. [ 751.558960] env[62109]: DEBUG nova.compute.manager [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 751.559430] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a707c4ac-4796-4c5f-ab84-8cc76c9b0243 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.603372] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116281, 'name': CreateSnapshot_Task, 'duration_secs': 0.911595} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.604401] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 751.605219] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65033d58-d338-486e-919f-9bfb95b3e67b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.636829] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116287, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.671156] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116288, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.857302] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527b9f7f-8c10-3a24-5e54-319c96c5849e, 'name': SearchDatastore_Task, 'duration_secs': 0.025681} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.857509] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.857754] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 751.858024] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.858132] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.858379] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 751.861684] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9832523f-9d7e-4371-8010-4752ddfeebc7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.866448] env[62109]: DEBUG nova.network.neutron [req-e0a34db0-6034-4cb9-b86e-1b94c3a1745c req-d18687bf-670c-466b-ae13-63edd54a8bc2 service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Updated VIF entry in instance network info cache for port 28469078-1559-4ee4-93a9-9165165a7b4c. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 751.866852] env[62109]: DEBUG nova.network.neutron [req-e0a34db0-6034-4cb9-b86e-1b94c3a1745c req-d18687bf-670c-466b-ae13-63edd54a8bc2 service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Updating instance_info_cache with network_info: [{"id": "28469078-1559-4ee4-93a9-9165165a7b4c", "address": "fa:16:3e:67:49:92", "network": {"id": "673cf969-ae87-4e20-a288-4363e937b462", "bridge": "br-int", "label": "tempest-ServersTestJSON-426005598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "49e293771a024d8d843ebb4c094722ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28469078-15", "ovs_interfaceid": "28469078-1559-4ee4-93a9-9165165a7b4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.875558] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116286, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529224} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.876024] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 8584eb2c-57a3-455e-9d3c-877286e23ccc/8584eb2c-57a3-455e-9d3c-877286e23ccc.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 751.876313] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 751.877213] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a189261f-e044-4478-b373-53ba8e75cf09 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.880434] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 751.880619] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 751.882026] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31c18c07-c811-4453-8ae6-bbfd5e11c71e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.887681] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Waiting for the task: (returnval){ [ 751.887681] env[62109]: value = "task-1116289" [ 751.887681] env[62109]: _type = "Task" [ 751.887681] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.889323] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Waiting for the task: (returnval){ [ 751.889323] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fc303a-bffa-ba25-9e27-a9a4b6016232" [ 751.889323] env[62109]: _type = "Task" [ 751.889323] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.915238] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116289, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.922020] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fc303a-bffa-ba25-9e27-a9a4b6016232, 'name': SearchDatastore_Task, 'duration_secs': 0.023366} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.922020] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfda3fd2-9f51-436a-85ba-2b71a6ee894b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.927597] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Waiting for the task: (returnval){ [ 751.927597] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d5790c-94b3-f21f-8b9d-12dca55c845d" [ 751.927597] env[62109]: _type = "Task" [ 751.927597] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.944265] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d5790c-94b3-f21f-8b9d-12dca55c845d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.081830] env[62109]: INFO nova.compute.manager [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Took 32.84 seconds to build instance. [ 752.127982] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 752.128433] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0bddb1db-8e32-47c7-9389-435880930bea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.145096] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116287, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.146667] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 752.146667] env[62109]: value = "task-1116290" [ 752.146667] env[62109]: _type = "Task" [ 752.146667] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.155988] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116290, 'name': CloneVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.172695] env[62109]: DEBUG oslo_vmware.api [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116288, 'name': PowerOnVM_Task, 'duration_secs': 0.995467} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.172695] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 752.172695] env[62109]: INFO nova.compute.manager [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Took 9.13 seconds to spawn the instance on the hypervisor. [ 752.172695] env[62109]: DEBUG nova.compute.manager [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 752.173528] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebca6c7-3f0e-45e1-9f56-fd5452b7fc12 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.371248] env[62109]: DEBUG oslo_concurrency.lockutils [req-e0a34db0-6034-4cb9-b86e-1b94c3a1745c req-d18687bf-670c-466b-ae13-63edd54a8bc2 service nova] Releasing lock "refresh_cache-8b6ec904-8c68-4eaa-94fe-47a87528e26b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.402160] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116289, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071061} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.402590] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 752.404055] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031a68f3-8315-4d26-adf3-4d73e94fd0a3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.430332] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] 8584eb2c-57a3-455e-9d3c-877286e23ccc/8584eb2c-57a3-455e-9d3c-877286e23ccc.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 752.430747] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e17354f4-aca1-4ea2-9f44-74e9c317910e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.460761] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d5790c-94b3-f21f-8b9d-12dca55c845d, 'name': SearchDatastore_Task, 'duration_secs': 0.013122} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.464986] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.465299] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 8b6ec904-8c68-4eaa-94fe-47a87528e26b/8b6ec904-8c68-4eaa-94fe-47a87528e26b.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 752.465672] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Waiting for the task: (returnval){ [ 752.465672] env[62109]: value = "task-1116291" [ 752.465672] env[62109]: _type = "Task" [ 752.465672] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.465879] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7fde4f61-2290-4dbe-bf7f-af584bfa04b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.479737] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116291, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.482228] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Waiting for the task: (returnval){ [ 752.482228] env[62109]: value = "task-1116292" [ 752.482228] env[62109]: _type = "Task" [ 752.482228] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.495199] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116292, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.585226] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c5b5a740-5583-4de4-a876-df0cbc5c37f9 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "028300fd-f9f8-4606-a39e-53582f830eeb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.843s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.643441] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116287, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.657449] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116290, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.701608] env[62109]: INFO nova.compute.manager [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Took 29.72 seconds to build instance. 
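The records above and below all follow the same three-step pattern: a worker serializes access to a shared resource (for example the cached image VMDK under devstack-image-cache_base) with an oslo.concurrency lock, invokes a vCenter method that returns a Task managed object (CopyVirtualDisk_Task, CloneVM_Task, PowerOnVM_Task, ...), and then blocks in oslo.vmware's wait_for_task loop, which produces the recurring "_poll_task ... progress is N%" lines until the task reports success and the lock is released. The sketch below is illustrative only and is not Nova's actual code: it assumes an already-established oslo_vmware.api.VMwareAPISession, and the helper name run_serialized_task and the caller-supplied start_task callable are hypothetical.

# Minimal sketch of the lock-then-poll pattern visible in this log.
# Assumptions: `session` is an existing oslo_vmware.api.VMwareAPISession and
# `start_task` is a hypothetical callable that invokes a vCenter method and
# returns the resulting Task managed-object reference.
from oslo_concurrency import lockutils
from oslo_vmware import exceptions as vexc


def run_serialized_task(session, lock_name, start_task):
    """Hold a named lock while one vCenter task runs to completion."""
    with lockutils.lock(lock_name):    # corresponds to the "Acquiring"/"Acquired lock" records
        task_ref = start_task()        # corresponds to an "Invoking <Something>_Task" record
        try:
            # wait_for_task polls the task (the "_poll_task ... progress is N%"
            # records) and returns its TaskInfo once the task succeeds.
            return session.wait_for_task(task_ref)
        except vexc.VimException:
            # A failed or cancelled task surfaces as an oslo.vmware exception;
            # Nova would log it and record an instance fault at this point.
            raise
        # Leaving the `with` block corresponds to the "Releasing lock" records.

Under those assumptions, the CopyVirtualDisk_Task sequence around task-1116286 would collapse to a single call such as run_serialized_task(session, cache_vmdk_lock_name, start_copy), where cache_vmdk_lock_name and start_copy are placeholders for the datastore lock name and the disk-copy invocation that nova.virt.vmwareapi.vm_util actually performs.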
[ 752.926208] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07505e11-edee-4936-b176-3a43754d12da {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.938742] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1522f9-5a7f-4728-a38a-9ac3446cd703 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.999772] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2753737-7a9d-4c51-be78-75bea06a7dce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.010081] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116291, 'name': ReconfigVM_Task, 'duration_secs': 0.331046} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.016582] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Reconfigured VM instance instance-0000002b to attach disk [datastore2] 8584eb2c-57a3-455e-9d3c-877286e23ccc/8584eb2c-57a3-455e-9d3c-877286e23ccc.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 753.017483] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116292, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.017804] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d51360b6-640d-434b-b9f9-5fdaee18daba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.021334] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a3e92f-d982-4230-b1b4-0c338391d8cc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.038696] env[62109]: DEBUG nova.compute.provider_tree [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.041576] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Waiting for the task: (returnval){ [ 753.041576] env[62109]: value = "task-1116293" [ 753.041576] env[62109]: _type = "Task" [ 753.041576] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.054222] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116293, 'name': Rename_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.090420] env[62109]: DEBUG nova.compute.manager [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 753.143031] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116287, 'name': CloneVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.159099] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116290, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.204518] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Acquiring lock "1aaa9eae-9183-49d7-a452-4345ad2a9aa0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.207690] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1769e4fc-8d5d-4b20-881f-9fb22e5a4442 tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Lock "1aaa9eae-9183-49d7-a452-4345ad2a9aa0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.527s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.209123] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Lock "1aaa9eae-9183-49d7-a452-4345ad2a9aa0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.005s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.211115] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Acquiring lock "1aaa9eae-9183-49d7-a452-4345ad2a9aa0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.211115] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc 
tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Lock "1aaa9eae-9183-49d7-a452-4345ad2a9aa0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.211115] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Lock "1aaa9eae-9183-49d7-a452-4345ad2a9aa0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.213110] env[62109]: INFO nova.compute.manager [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Terminating instance [ 753.214584] env[62109]: DEBUG nova.compute.manager [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 753.214859] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 753.216029] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a816e37-ace4-4962-8ab5-94b020b203e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.225618] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 753.225910] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c93c8d4e-8a85-4ed1-a3bd-adb13b1b50fb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.234202] env[62109]: DEBUG oslo_vmware.api [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Waiting for the task: (returnval){ [ 753.234202] env[62109]: value = "task-1116294" [ 753.234202] env[62109]: _type = "Task" [ 753.234202] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.245237] env[62109]: DEBUG oslo_vmware.api [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116294, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.499824] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116292, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570655} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.500161] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 8b6ec904-8c68-4eaa-94fe-47a87528e26b/8b6ec904-8c68-4eaa-94fe-47a87528e26b.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 753.500362] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 753.500615] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f792ba38-125d-4127-bac7-be9306cb7bdf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.510887] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Waiting for the task: (returnval){ [ 753.510887] env[62109]: value = "task-1116295" [ 753.510887] env[62109]: _type = "Task" [ 753.510887] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.522179] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116295, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.544655] env[62109]: DEBUG nova.scheduler.client.report [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 753.559082] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116293, 'name': Rename_Task, 'duration_secs': 0.15821} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.560090] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 753.560345] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37eac3cc-cd2f-40fe-ab40-d48d5b88e535 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.568692] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Waiting for the task: (returnval){ [ 753.568692] env[62109]: value = "task-1116296" [ 753.568692] env[62109]: _type = "Task" [ 753.568692] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.579963] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116296, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.616770] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.645662] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116287, 'name': CloneVM_Task, 'duration_secs': 2.067236} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.647476] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Created linked-clone VM from snapshot [ 753.648472] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7a57e7-e5d3-48cc-95e0-d2c5ba942ee4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.661316] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116290, 'name': CloneVM_Task, 'duration_secs': 1.421859} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.664583] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Created linked-clone VM from snapshot [ 753.664888] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Uploading image 82fc1455-d47f-4216-8024-8ece46171084 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 753.667724] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0088e08-612a-462e-9ac4-fc6e28c4b6b6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.676015] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Uploading image 870ded0e-f967-44e6-a041-a5796e0c3185 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 753.687146] env[62109]: DEBUG oslo_vmware.rw_handles [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 753.687146] env[62109]: value = "vm-244388" [ 753.687146] env[62109]: _type = "VirtualMachine" [ 753.687146] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 753.687146] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d8d4dbb6-fdf7-4a38-911c-f2d31a56340a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.696886] env[62109]: DEBUG oslo_vmware.rw_handles [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lease: (returnval){ [ 753.696886] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527c3334-5870-c23e-6d2b-d8cd6d47a49b" [ 753.696886] env[62109]: _type = "HttpNfcLease" [ 753.696886] env[62109]: } obtained for exporting VM: (result){ [ 753.696886] env[62109]: value = "vm-244388" [ 753.696886] env[62109]: _type = "VirtualMachine" [ 753.696886] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 753.697177] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the lease: (returnval){ [ 753.697177] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527c3334-5870-c23e-6d2b-d8cd6d47a49b" [ 753.697177] env[62109]: _type = "HttpNfcLease" [ 753.697177] env[62109]: } to be ready. 
{{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 753.704396] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 753.704396] env[62109]: value = "vm-244390" [ 753.704396] env[62109]: _type = "VirtualMachine" [ 753.704396] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 753.704628] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-25a522cc-7229-494f-9de6-f2ff5a0e56ed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.707716] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 753.707716] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527c3334-5870-c23e-6d2b-d8cd6d47a49b" [ 753.707716] env[62109]: _type = "HttpNfcLease" [ 753.707716] env[62109]: } is initializing. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 753.712623] env[62109]: DEBUG nova.compute.manager [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 753.717786] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lease: (returnval){ [ 753.717786] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5260de3f-2cdf-048b-6714-fc2ac88d6d1f" [ 753.717786] env[62109]: _type = "HttpNfcLease" [ 753.717786] env[62109]: } obtained for exporting VM: (result){ [ 753.717786] env[62109]: value = "vm-244390" [ 753.717786] env[62109]: _type = "VirtualMachine" [ 753.717786] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 753.718839] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the lease: (returnval){ [ 753.718839] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5260de3f-2cdf-048b-6714-fc2ac88d6d1f" [ 753.718839] env[62109]: _type = "HttpNfcLease" [ 753.718839] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 753.726184] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 753.726184] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5260de3f-2cdf-048b-6714-fc2ac88d6d1f" [ 753.726184] env[62109]: _type = "HttpNfcLease" [ 753.726184] env[62109]: } is initializing. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 753.746282] env[62109]: DEBUG oslo_vmware.api [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116294, 'name': PowerOffVM_Task, 'duration_secs': 0.234776} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.746282] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 753.746282] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 753.746282] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e75faaae-130a-4c81-9c29-fcd29be6d173 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.768768] env[62109]: DEBUG nova.compute.manager [req-1c78eb9e-4006-4c21-887a-9ca8e95c4cbc req-4a2d7c4d-68c9-460f-a49a-a95953c66445 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Received event network-changed-bb65c0b6-debe-49a6-a623-fc3778c5b9a8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 753.768884] env[62109]: DEBUG nova.compute.manager [req-1c78eb9e-4006-4c21-887a-9ca8e95c4cbc req-4a2d7c4d-68c9-460f-a49a-a95953c66445 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Refreshing instance network info cache due to event network-changed-bb65c0b6-debe-49a6-a623-fc3778c5b9a8. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 753.769111] env[62109]: DEBUG oslo_concurrency.lockutils [req-1c78eb9e-4006-4c21-887a-9ca8e95c4cbc req-4a2d7c4d-68c9-460f-a49a-a95953c66445 service nova] Acquiring lock "refresh_cache-028300fd-f9f8-4606-a39e-53582f830eeb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.769366] env[62109]: DEBUG oslo_concurrency.lockutils [req-1c78eb9e-4006-4c21-887a-9ca8e95c4cbc req-4a2d7c4d-68c9-460f-a49a-a95953c66445 service nova] Acquired lock "refresh_cache-028300fd-f9f8-4606-a39e-53582f830eeb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.769557] env[62109]: DEBUG nova.network.neutron [req-1c78eb9e-4006-4c21-887a-9ca8e95c4cbc req-4a2d7c4d-68c9-460f-a49a-a95953c66445 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Refreshing network info cache for port bb65c0b6-debe-49a6-a623-fc3778c5b9a8 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 753.854996] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 753.855163] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 753.855403] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Deleting the datastore file [datastore2] 1aaa9eae-9183-49d7-a452-4345ad2a9aa0 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 753.855712] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-152f3f90-9000-4e27-84ab-0a006ac0ac11 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.866106] env[62109]: DEBUG oslo_vmware.api [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Waiting for the task: (returnval){ [ 753.866106] env[62109]: value = "task-1116300" [ 753.866106] env[62109]: _type = "Task" [ 753.866106] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.876495] env[62109]: DEBUG oslo_vmware.api [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116300, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.023082] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116295, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074973} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.023082] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.023745] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47cb0af-23dc-4932-9bab-c950f1c23185 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.047673] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] 8b6ec904-8c68-4eaa-94fe-47a87528e26b/8b6ec904-8c68-4eaa-94fe-47a87528e26b.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.048384] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58408eb4-ca2e-4fd9-956f-77d0e76d8aec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.063872] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.629s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.064399] env[62109]: DEBUG nova.compute.manager [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 754.066904] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.958s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.067132] env[62109]: DEBUG nova.objects.instance [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lazy-loading 'resources' on Instance uuid d727d597-c4ac-426e-bdc3-fc4f73a3eac9 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 754.075387] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Waiting for the task: (returnval){ [ 754.075387] env[62109]: value = "task-1116301" [ 754.075387] env[62109]: _type = "Task" [ 754.075387] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.082169] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116296, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.089924] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116301, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.207482] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 754.207482] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527c3334-5870-c23e-6d2b-d8cd6d47a49b" [ 754.207482] env[62109]: _type = "HttpNfcLease" [ 754.207482] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 754.207779] env[62109]: DEBUG oslo_vmware.rw_handles [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 754.207779] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527c3334-5870-c23e-6d2b-d8cd6d47a49b" [ 754.207779] env[62109]: _type = "HttpNfcLease" [ 754.207779] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 754.208579] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065e81ea-fb85-4563-a5ce-e70be5f8e2d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.217175] env[62109]: DEBUG oslo_vmware.rw_handles [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f9b43-b9af-fc19-d5cf-93d503ec93cd/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 754.217480] env[62109]: DEBUG oslo_vmware.rw_handles [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f9b43-b9af-fc19-d5cf-93d503ec93cd/disk-0.vmdk for reading. {{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 754.301663] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 754.301663] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5260de3f-2cdf-048b-6714-fc2ac88d6d1f" [ 754.301663] env[62109]: _type = "HttpNfcLease" [ 754.301663] env[62109]: } is ready. 
{{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 754.302410] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 754.302410] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5260de3f-2cdf-048b-6714-fc2ac88d6d1f" [ 754.302410] env[62109]: _type = "HttpNfcLease" [ 754.302410] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 754.303746] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75962de8-d1ad-4aa9-a0a7-21ee4093c692 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.312935] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5225d604-302d-8a44-aea7-b0b05defe827/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 754.317478] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5225d604-302d-8a44-aea7-b0b05defe827/disk-0.vmdk for reading. {{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 754.319240] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.389142] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1fd37e45-8161-40bf-9601-1dc85cdc6cb1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.397424] env[62109]: DEBUG oslo_vmware.api [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Task: {'id': task-1116300, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15373} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.397768] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 754.397936] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 754.398177] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 754.398378] env[62109]: INFO nova.compute.manager [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Took 1.18 seconds to destroy the instance on the hypervisor. [ 754.398630] env[62109]: DEBUG oslo.service.loopingcall [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 754.398816] env[62109]: DEBUG nova.compute.manager [-] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 754.398928] env[62109]: DEBUG nova.network.neutron [-] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 754.439526] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-03f9eaff-32ca-48b5-acae-a53211902867 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.573803] env[62109]: DEBUG nova.compute.utils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 754.576197] env[62109]: DEBUG nova.compute.manager [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Not allocating networking since 'none' was specified. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 754.600666] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116296, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.600946] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116301, 'name': ReconfigVM_Task, 'duration_secs': 0.414407} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.606119] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Reconfigured VM instance instance-0000002c to attach disk [datastore2] 8b6ec904-8c68-4eaa-94fe-47a87528e26b/8b6ec904-8c68-4eaa-94fe-47a87528e26b.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 754.606119] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb5c5263-274b-4547-9820-d89e2f80ae56 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.614871] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Waiting for the task: (returnval){ [ 754.614871] env[62109]: value = "task-1116302" [ 754.614871] env[62109]: _type = "Task" [ 754.614871] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.624721] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116302, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.647688] env[62109]: DEBUG nova.network.neutron [req-1c78eb9e-4006-4c21-887a-9ca8e95c4cbc req-4a2d7c4d-68c9-460f-a49a-a95953c66445 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Updated VIF entry in instance network info cache for port bb65c0b6-debe-49a6-a623-fc3778c5b9a8. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 754.648191] env[62109]: DEBUG nova.network.neutron [req-1c78eb9e-4006-4c21-887a-9ca8e95c4cbc req-4a2d7c4d-68c9-460f-a49a-a95953c66445 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Updating instance_info_cache with network_info: [{"id": "bb65c0b6-debe-49a6-a623-fc3778c5b9a8", "address": "fa:16:3e:d7:01:2c", "network": {"id": "35f9f3e9-a618-43e3-ac76-30778c802026", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-152247776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb674b9896af406aad0bc08bb8a63c72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91b0f7e5-0d1a-46e2-bf73-09656211dea2", "external-id": "nsx-vlan-transportzone-488", "segmentation_id": 488, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb65c0b6-de", "ovs_interfaceid": "bb65c0b6-debe-49a6-a623-fc3778c5b9a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.962662] env[62109]: DEBUG nova.compute.manager [req-c9e1e69d-ac56-4208-a34f-33c96101bda9 req-565c2671-7dc8-4ddb-855d-3481719ab1fb service nova] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Received event network-vif-deleted-5a5722bc-d005-4ebd-8e52-08cbad2eb313 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 754.963152] env[62109]: INFO nova.compute.manager [req-c9e1e69d-ac56-4208-a34f-33c96101bda9 req-565c2671-7dc8-4ddb-855d-3481719ab1fb service nova] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Neutron deleted interface 5a5722bc-d005-4ebd-8e52-08cbad2eb313; detaching it from the instance and deleting it from the info cache [ 754.963249] env[62109]: DEBUG nova.network.neutron [req-c9e1e69d-ac56-4208-a34f-33c96101bda9 req-565c2671-7dc8-4ddb-855d-3481719ab1fb service nova] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.064628] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cafa47e-fa62-4f84-9789-ca6efd747dbe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.079379] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5561cb-e975-4c72-82cb-5725cacb0690 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.083370] env[62109]: DEBUG nova.compute.manager [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Start building block device mappings for 
instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 755.092484] env[62109]: DEBUG oslo_vmware.api [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116296, 'name': PowerOnVM_Task, 'duration_secs': 1.062094} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.120633] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 755.120633] env[62109]: INFO nova.compute.manager [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Took 9.45 seconds to spawn the instance on the hypervisor. [ 755.120633] env[62109]: DEBUG nova.compute.manager [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 755.121928] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620c8fdb-bf56-41c2-b7ef-413135d79269 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.130566] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c20671b-8244-4645-a9b0-58efb4042ffb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.147941] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21199500-43e4-4ced-91c1-794d7b656890 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.152542] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116302, 'name': Rename_Task, 'duration_secs': 0.19327} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.156583] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 755.158687] env[62109]: DEBUG oslo_concurrency.lockutils [req-1c78eb9e-4006-4c21-887a-9ca8e95c4cbc req-4a2d7c4d-68c9-460f-a49a-a95953c66445 service nova] Releasing lock "refresh_cache-028300fd-f9f8-4606-a39e-53582f830eeb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.160196] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4cdaad42-1fd5-4672-83dd-e63039d36ea1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.174461] env[62109]: DEBUG nova.compute.provider_tree [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.183077] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Waiting for the task: (returnval){ [ 755.183077] env[62109]: value = "task-1116303" [ 755.183077] env[62109]: _type = "Task" [ 755.183077] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.195413] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116303, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.254717] env[62109]: DEBUG nova.network.neutron [-] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.471107] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8ab3740a-bff1-4a61-95e5-2ab7d1625d89 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.485131] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b9f8b8-325b-4372-b751-7e1e0abbc98f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.521921] env[62109]: DEBUG nova.compute.manager [req-c9e1e69d-ac56-4208-a34f-33c96101bda9 req-565c2671-7dc8-4ddb-855d-3481719ab1fb service nova] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Detach interface failed, port_id=5a5722bc-d005-4ebd-8e52-08cbad2eb313, reason: Instance 1aaa9eae-9183-49d7-a452-4345ad2a9aa0 could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 755.670029] env[62109]: INFO nova.compute.manager [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Took 30.94 seconds to build instance. [ 755.681908] env[62109]: DEBUG nova.scheduler.client.report [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 755.709429] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116303, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.757484] env[62109]: INFO nova.compute.manager [-] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Took 1.36 seconds to deallocate network for instance. [ 756.094146] env[62109]: DEBUG nova.compute.manager [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 756.173688] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce94cb95-8f96-4e9d-a444-8b9cafdb6731 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Lock "8584eb2c-57a3-455e-9d3c-877286e23ccc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 149.669s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.190924] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.124s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.193558] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.592s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.195231] env[62109]: INFO nova.compute.claims [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 756.212719] env[62109]: DEBUG oslo_vmware.api [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116303, 'name': PowerOnVM_Task, 'duration_secs': 0.763195} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.213479] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 756.213725] env[62109]: INFO nova.compute.manager [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Took 7.95 seconds to spawn the instance on the hypervisor. 
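The "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%" and "completed successfully" records above come from oslo.vmware polling a vCenter task until it finishes. The snippet below is only a minimal stdlib sketch of that poll-until-done pattern, not the oslo.vmware implementation; get_task_info is a hypothetical stand-in for the per-poll property read.

import time

def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a task until it reports success; raise on error or timeout."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)   # e.g. {'state': 'running', 'progress': 88}
        if info['state'] == 'success':
            return info                 # corresponds to "completed successfully"
        if info['state'] == 'error':
            raise RuntimeError('task %s failed: %s' % (task_id, info.get('error')))
        # corresponds to the periodic "progress is N%." debug records
        time.sleep(poll_interval)
    raise TimeoutError('task %s did not complete within %ss' % (task_id, timeout))

A real caller would pass a function that reads the task's info property from vCenter and would log the progress instead of ignoring it; the loop structure is the point here.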
[ 756.214090] env[62109]: DEBUG nova.compute.manager [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 756.215031] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef0afe4-ad55-4440-a2a6-12fa549775c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.220549] env[62109]: INFO nova.scheduler.client.report [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted allocations for instance d727d597-c4ac-426e-bdc3-fc4f73a3eac9 [ 756.265838] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.676431] env[62109]: DEBUG nova.compute.manager [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 756.739669] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cba474a8-bf49-4eb6-922d-4d0fb3bc5be9 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "d727d597-c4ac-426e-bdc3-fc4f73a3eac9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.870s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.743078] env[62109]: INFO nova.compute.manager [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Took 30.90 seconds to build instance. 
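The lockutils records above report how long each caller waited for and then held the "compute_resources" lock ("acquired ... :: waited 21.592s", "released ... :: held 2.124s"). The following is a stdlib-only sketch of that wait/held bookkeeping, assuming a hypothetical module-level registry of named locks; it is not the oslo.concurrency code, just the pattern behind those log lines.

import contextlib
import threading
import time

_locks = {}                      # name -> threading.Lock (hypothetical registry)
_registry_guard = threading.Lock()

@contextlib.contextmanager
def timed_lock(name, owner):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
    try:
        yield
    finally:
        held = time.monotonic() - t0 - waited
        lock.release()
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, held))

# Usage, mirroring the resource-tracker claims in the log:
# with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
#     ...claim resources for the instance...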
[ 757.200082] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.245157] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c66d823-17ef-4e82-bfc0-a543ef45b6ff tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Lock "8b6ec904-8c68-4eaa-94fe-47a87528e26b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 140.880s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.594749] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9060c5cd-2963-437c-807c-e1515ff16cb4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.604427] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e28c30c-5b4e-4bb9-aff1-b1865cdde42e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.638356] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2df2e93-4b72-4011-a9d0-08ff0398670f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.647335] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6591288-ec40-496d-90ba-a628e5fffc23 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.661983] env[62109]: DEBUG nova.compute.provider_tree [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.752061] env[62109]: DEBUG nova.compute.manager [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 758.165175] env[62109]: DEBUG nova.scheduler.client.report [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 758.273565] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.670609] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.671250] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 758.674085] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.539s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.675685] env[62109]: INFO nova.compute.claims [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 759.180028] env[62109]: DEBUG nova.compute.utils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 759.186800] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 759.187083] env[62109]: DEBUG nova.network.neutron [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 759.238583] env[62109]: DEBUG nova.policy [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '741d963eb6fe473db210b0d6956e8193', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5435b5d2a57a47a9a087b0f466ed33b5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 759.498280] env[62109]: DEBUG nova.network.neutron [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Successfully created port: 7bfa3d99-6ca4-4382-a04a-d5ad176b7597 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 759.690361] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 760.078444] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6698ed0-a013-4cca-ad6b-f22fa89824fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.088462] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9062ed6-09e3-4165-b55a-d1778bc116aa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.123941] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcf7a6d-87e3-4ebb-9dc1-cc5fcbfdc215 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.133416] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d442127b-8b28-414b-9f7f-b0d9c1e8edfc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.149187] env[62109]: DEBUG nova.compute.provider_tree [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.652242] env[62109]: DEBUG nova.scheduler.client.report [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 760.703457] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 760.706252] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 760.706454] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.064309] env[62109]: DEBUG nova.network.neutron [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Successfully updated port: 7bfa3d99-6ca4-4382-a04a-d5ad176b7597 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 761.158022] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.484s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.158634] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 761.161387] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.955s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.161627] env[62109]: DEBUG nova.objects.instance [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Lazy-loading 'resources' on Instance uuid 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 761.212981] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.213171] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 761.213281] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Rebuilding the list of instances to heal {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 761.566481] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "refresh_cache-c90ace77-5b8b-4b04-aa57-d47ad17df01e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.566775] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired lock "refresh_cache-c90ace77-5b8b-4b04-aa57-d47ad17df01e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.566989] env[62109]: DEBUG nova.network.neutron [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 761.665641] env[62109]: DEBUG nova.compute.utils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 761.667285] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 761.667467] env[62109]: DEBUG nova.network.neutron [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 761.713578] env[62109]: DEBUG nova.policy [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '741d963eb6fe473db210b0d6956e8193', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5435b5d2a57a47a9a087b0f466ed33b5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 761.717966] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 761.718172] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 761.718318] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Skipping network cache update for instance because it is Building. 
{{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 761.750987] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.751867] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquired lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.751867] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Forcefully refreshing network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 761.751867] env[62109]: DEBUG nova.objects.instance [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lazy-loading 'info_cache' on Instance uuid 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 761.928585] env[62109]: DEBUG nova.virt.hardware [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 761.928891] env[62109]: DEBUG nova.virt.hardware [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 761.928992] env[62109]: DEBUG nova.virt.hardware [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 761.929196] env[62109]: DEBUG nova.virt.hardware [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 761.930248] env[62109]: DEBUG nova.virt.hardware [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 761.930488] env[62109]: DEBUG 
nova.virt.hardware [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 761.930875] env[62109]: DEBUG nova.virt.hardware [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 761.931206] env[62109]: DEBUG nova.virt.hardware [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 761.931508] env[62109]: DEBUG nova.virt.hardware [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 761.931814] env[62109]: DEBUG nova.virt.hardware [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 761.932997] env[62109]: DEBUG nova.virt.hardware [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 761.935398] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c6ae7d-e8e6-4b23-84ff-d94fed6d5237 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.948097] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 761.948293] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c 
tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 761.948540] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 761.948633] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 761.948776] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 761.948930] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 761.949967] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 761.950222] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 761.950500] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 761.950598] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 761.950763] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 761.956035] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42a4f54-2736-4a1f-9133-f2ef5d7ecd2c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.966619] env[62109]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f7ead1-d8ac-4748-9c75-64f3fea23934 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.972786] env[62109]: DEBUG oslo_vmware.rw_handles [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f9b43-b9af-fc19-d5cf-93d503ec93cd/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 761.974148] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0acb6dd-ffaf-474f-b1e3-315f37c8a81f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.980525] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b12968-570e-498b-aa11-8c2698668813 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.996933] env[62109]: DEBUG oslo_vmware.rw_handles [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f9b43-b9af-fc19-d5cf-93d503ec93cd/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 761.996933] env[62109]: ERROR oslo_vmware.rw_handles [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f9b43-b9af-fc19-d5cf-93d503ec93cd/disk-0.vmdk due to incomplete transfer. [ 761.997452] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 762.003027] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Creating folder: Project (84feb76c90c4418bb16ec51da00c53fc). Parent ref: group-v244329. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 762.005898] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-dd28d150-924a-4b01-b552-9ace2ce63c3c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.007651] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bdd3e831-571a-4184-8dc6-1285bdaa065d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.026645] env[62109]: DEBUG oslo_vmware.rw_handles [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f9b43-b9af-fc19-d5cf-93d503ec93cd/disk-0.vmdk. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 762.026905] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Uploaded image 82fc1455-d47f-4216-8024-8ece46171084 to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 762.029936] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 762.034203] env[62109]: DEBUG nova.network.neutron [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Successfully created port: 91a8c9ef-fce5-4834-b0a8-b3a07bf1c769 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 762.036103] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-137f2fcf-8b6a-4c9c-b896-bf071500e361 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.038070] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Created folder: Project (84feb76c90c4418bb16ec51da00c53fc) in parent group-v244329. [ 762.038281] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Creating folder: Instances. Parent ref: group-v244391. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 762.039569] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64c172c7-03c7-4129-a46d-b231496eace1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.047645] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 762.047645] env[62109]: value = "task-1116306" [ 762.047645] env[62109]: _type = "Task" [ 762.047645] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.056119] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Created folder: Instances in parent group-v244391. [ 762.056539] env[62109]: DEBUG oslo.service.loopingcall [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 762.057656] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 762.058041] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e89cb327-64ed-4927-b8b7-959db9799a69 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.079271] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116306, 'name': Destroy_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.087515] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 762.087515] env[62109]: value = "task-1116307" [ 762.087515] env[62109]: _type = "Task" [ 762.087515] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.097114] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116307, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.122885] env[62109]: DEBUG nova.network.neutron [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 762.143671] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9096fcd6-5545-4fe5-9ab1-4923b3c46172 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.155645] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b3f306-3fe4-4df9-9c68-5d8faec5a5f0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.191521] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 762.198950] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f71d02b-c0f7-4ea5-864d-031e9fd58a48 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.209287] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7706377e-79e6-48e6-af0f-ef2e650c29ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.225663] env[62109]: DEBUG nova.compute.provider_tree [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.319651] env[62109]: DEBUG nova.network.neutron [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Updating instance_info_cache with network_info: [{"id": "7bfa3d99-6ca4-4382-a04a-d5ad176b7597", "address": "fa:16:3e:7c:d7:68", "network": {"id": "99854997-f910-4858-a446-ecc6781e679e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1633145076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5435b5d2a57a47a9a087b0f466ed33b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bfa3d99-6c", "ovs_interfaceid": "7bfa3d99-6ca4-4382-a04a-d5ad176b7597", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.568737] env[62109]: DEBUG oslo_vmware.api 
[None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116306, 'name': Destroy_Task} progress is 33%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.599810] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116307, 'name': CreateVM_Task} progress is 25%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.729424] env[62109]: DEBUG nova.scheduler.client.report [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 762.803520] env[62109]: DEBUG nova.compute.manager [req-882920e2-12f3-4d88-9d8c-1b14139d2df0 req-7fb763af-d6d2-4a30-989c-8cff40dbc899 service nova] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Received event network-vif-plugged-7bfa3d99-6ca4-4382-a04a-d5ad176b7597 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 762.804760] env[62109]: DEBUG oslo_concurrency.lockutils [req-882920e2-12f3-4d88-9d8c-1b14139d2df0 req-7fb763af-d6d2-4a30-989c-8cff40dbc899 service nova] Acquiring lock "c90ace77-5b8b-4b04-aa57-d47ad17df01e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.804760] env[62109]: DEBUG oslo_concurrency.lockutils [req-882920e2-12f3-4d88-9d8c-1b14139d2df0 req-7fb763af-d6d2-4a30-989c-8cff40dbc899 service nova] Lock "c90ace77-5b8b-4b04-aa57-d47ad17df01e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.804760] env[62109]: DEBUG oslo_concurrency.lockutils [req-882920e2-12f3-4d88-9d8c-1b14139d2df0 req-7fb763af-d6d2-4a30-989c-8cff40dbc899 service nova] Lock "c90ace77-5b8b-4b04-aa57-d47ad17df01e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.805225] env[62109]: DEBUG nova.compute.manager [req-882920e2-12f3-4d88-9d8c-1b14139d2df0 req-7fb763af-d6d2-4a30-989c-8cff40dbc899 service nova] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] No waiting events found dispatching network-vif-plugged-7bfa3d99-6ca4-4382-a04a-d5ad176b7597 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 762.805546] env[62109]: WARNING nova.compute.manager [req-882920e2-12f3-4d88-9d8c-1b14139d2df0 req-7fb763af-d6d2-4a30-989c-8cff40dbc899 service nova] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Received unexpected event network-vif-plugged-7bfa3d99-6ca4-4382-a04a-d5ad176b7597 for instance with vm_state building and 
task_state spawning. [ 762.822211] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Releasing lock "refresh_cache-c90ace77-5b8b-4b04-aa57-d47ad17df01e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.823316] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Instance network_info: |[{"id": "7bfa3d99-6ca4-4382-a04a-d5ad176b7597", "address": "fa:16:3e:7c:d7:68", "network": {"id": "99854997-f910-4858-a446-ecc6781e679e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1633145076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5435b5d2a57a47a9a087b0f466ed33b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bfa3d99-6c", "ovs_interfaceid": "7bfa3d99-6ca4-4382-a04a-d5ad176b7597", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 762.823496] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:d7:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f85835c8-5d0c-4b2f-97c4-6c4006580f79', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7bfa3d99-6ca4-4382-a04a-d5ad176b7597', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 762.830976] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Creating folder: Project (5435b5d2a57a47a9a087b0f466ed33b5). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 762.831554] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-905b658e-245b-4e88-bebd-e49d8eef6331 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.851637] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Created folder: Project (5435b5d2a57a47a9a087b0f466ed33b5) in parent group-v244329. 
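
The surrounding entries show the vmwareapi driver creating a per-project folder via Folder.CreateFolder, then an Instances folder and the VM itself via CreateVM_Task, polling each returned Task until it reports completion. As a rough, hedged sketch of that invoke-and-wait pattern with oslo.vmware (not Nova's actual vm_util code), something like the following could produce entries of this shape; the vCenter host, credentials, folder name and parent-folder choice are placeholders assumed for the example.

# Sketch of the CreateFolder / wait_for_task pattern visible in the log above.
# Assumes a reachable vCenter; host, credentials and names are illustrative only.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Folder.CreateFolder is invoked on a parent Folder managed object and returns
# the new folder's moref directly (no Task to poll). Nova uses the project
# folder under the datacenter's vmFolder here; rootFolder is a simplification.
parent = session.vim.service_content.rootFolder
project_folder = session.invoke_api(
    session.vim, 'CreateFolder', parent, name='Project (example)')

# Long-running calls such as CreateVM_Task instead return a Task moref;
# wait_for_task() polls it (producing "progress is N%" entries like the ones
# above) and returns the completed task info, or raises on error.
# task_ref = session.invoke_api(session.vim, 'CreateVM_Task', project_folder,
#                               config=vm_config_spec, pool=resource_pool_ref)
# task_info = session.wait_for_task(task_ref)
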
[ 762.851922] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Creating folder: Instances. Parent ref: group-v244394. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 762.852235] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8ff5841-d3f5-48fb-9111-94ed721074e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.866508] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Created folder: Instances in parent group-v244394. [ 762.866826] env[62109]: DEBUG oslo.service.loopingcall [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 762.867067] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 762.867482] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9fb19135-b540-46d7-a660-4c3022e14e63 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.890457] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 762.890457] env[62109]: value = "task-1116310" [ 762.890457] env[62109]: _type = "Task" [ 762.890457] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.914232] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116310, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.059692] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116306, 'name': Destroy_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.099427] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116307, 'name': CreateVM_Task, 'duration_secs': 0.669326} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.099602] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 763.100119] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.100321] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.100609] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 763.100892] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4d2fe02-b248-49b4-8c41-de00b14a15c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.106739] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Waiting for the task: (returnval){ [ 763.106739] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524b8ab1-422a-e261-84ba-e36c1053aade" [ 763.106739] env[62109]: _type = "Task" [ 763.106739] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.116092] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524b8ab1-422a-e261-84ba-e36c1053aade, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.209676] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 763.222556] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5225d604-302d-8a44-aea7-b0b05defe827/disk-0.vmdk. 
{{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 763.223564] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6bb73af-4bcd-493f-ba0f-33cda60a177b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.230720] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5225d604-302d-8a44-aea7-b0b05defe827/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 763.230905] env[62109]: ERROR oslo_vmware.rw_handles [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5225d604-302d-8a44-aea7-b0b05defe827/disk-0.vmdk due to incomplete transfer. [ 763.231159] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9291de6c-09c6-40bb-9cd3-0a7315547b38 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.234607] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.073s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.239017] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 763.239258] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 763.239419] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} 
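
The repeated nova.virt.hardware sequences in this log ("Getting desirable topologies" through "Sorted desired topologies") reduce unset flavor and image constraints (preferences of 0:0:0, maxima of 65536) to a single 1-socket, 1-core, 1-thread topology for the 1-vCPU m1.nano flavor. The short, self-contained sketch below reproduces that kind of enumeration for illustration only; it is a simplified stand-in, not the driver's _get_possible_cpu_topologies implementation.

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate socket/core/thread splits whose product equals vcpus.

    Simplified illustration of the enumeration behind the "Build topologies
    for N vcpu(s)" / "Got N possible topologies" entries in the log.
    """
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

# m1.nano has vcpus=1, so only one split survives, matching the
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entries.
print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
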
[ 763.239604] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 763.239838] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.240029] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 763.240327] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 763.240511] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 763.240700] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 763.240872] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 763.241070] env[62109]: DEBUG nova.virt.hardware [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 763.241390] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.601s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.245024] env[62109]: INFO nova.compute.claims [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 
763.246887] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6f6fc4-e4f9-466a-9785-62aafa9460f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.252591] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5225d604-302d-8a44-aea7-b0b05defe827/disk-0.vmdk. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 763.252811] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Uploaded image 870ded0e-f967-44e6-a041-a5796e0c3185 to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 763.254368] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 763.254989] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6d1b4328-a782-402d-ae3e-790d9b7952d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.262892] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e5e992-e24e-43a2-94a8-6f90d1716067 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.272552] env[62109]: INFO nova.scheduler.client.report [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Deleted allocations for instance 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e [ 763.277349] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 763.277349] env[62109]: value = "task-1116311" [ 763.277349] env[62109]: _type = "Task" [ 763.277349] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.307479] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116311, 'name': Destroy_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.401299] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116310, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.562327] env[62109]: DEBUG nova.compute.manager [req-8d5830a2-c12c-4c5f-99fc-fa92c15040da req-994cfc61-f40d-46b2-8ddc-166501535c7c service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Received event network-changed-28469078-1559-4ee4-93a9-9165165a7b4c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 763.562523] env[62109]: DEBUG nova.compute.manager [req-8d5830a2-c12c-4c5f-99fc-fa92c15040da req-994cfc61-f40d-46b2-8ddc-166501535c7c service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Refreshing instance network info cache due to event network-changed-28469078-1559-4ee4-93a9-9165165a7b4c. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 763.562816] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d5830a2-c12c-4c5f-99fc-fa92c15040da req-994cfc61-f40d-46b2-8ddc-166501535c7c service nova] Acquiring lock "refresh_cache-8b6ec904-8c68-4eaa-94fe-47a87528e26b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.562983] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d5830a2-c12c-4c5f-99fc-fa92c15040da req-994cfc61-f40d-46b2-8ddc-166501535c7c service nova] Acquired lock "refresh_cache-8b6ec904-8c68-4eaa-94fe-47a87528e26b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.563169] env[62109]: DEBUG nova.network.neutron [req-8d5830a2-c12c-4c5f-99fc-fa92c15040da req-994cfc61-f40d-46b2-8ddc-166501535c7c service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Refreshing network info cache for port 28469078-1559-4ee4-93a9-9165165a7b4c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 763.571616] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116306, 'name': Destroy_Task, 'duration_secs': 1.514857} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.572105] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Destroyed the VM [ 763.572374] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 763.573375] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a0637224-a26c-4491-b442-61dc60bb79db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.581263] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 763.581263] env[62109]: value = "task-1116312" [ 763.581263] env[62109]: _type = "Task" [ 763.581263] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.591883] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116312, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.602620] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Updating instance_info_cache with network_info: [{"id": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "address": "fa:16:3e:5c:b7:79", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa43481f3-ca", "ovs_interfaceid": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.623825] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524b8ab1-422a-e261-84ba-e36c1053aade, 'name': SearchDatastore_Task, 'duration_secs': 0.012018} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.624164] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.624405] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 763.624640] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.624789] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.624973] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 763.625371] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6396f18b-b808-4a53-b6c8-17700df9b2cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.637441] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 763.637678] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 763.638508] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c28669be-7b62-4a93-9001-6567ede0d3b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.645386] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Waiting for the task: (returnval){ [ 763.645386] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e45a72-598a-b1d6-46d0-3b8fff7a08b8" [ 763.645386] env[62109]: _type = "Task" [ 763.645386] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.654838] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e45a72-598a-b1d6-46d0-3b8fff7a08b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.791391] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116311, 'name': Destroy_Task, 'duration_secs': 0.331003} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.791391] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c9bed84-29b0-468e-9a2d-d67a1f6a72d3 tempest-ServerMetadataTestJSON-338374499 tempest-ServerMetadataTestJSON-338374499-project-member] Lock "8d9a7696-0465-4895-9ce8-4b4b8b2ca59e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.689s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.791728] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Destroyed the VM [ 763.791768] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 763.792200] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e0761e17-9363-43d4-b07a-a5aa6b5b9382 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.803225] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 763.803225] env[62109]: value = "task-1116313" [ 763.803225] env[62109]: _type = "Task" [ 763.803225] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.814863] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116313, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.903949] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116310, 'name': CreateVM_Task, 'duration_secs': 0.973639} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.904199] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 763.904989] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.905191] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.905527] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 763.905794] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-caafd112-670a-4d0a-a809-a0de14da974b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.912616] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 763.912616] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5216e453-fdba-a700-c7d9-446fa5401204" [ 763.912616] env[62109]: _type = "Task" [ 763.912616] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.923286] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5216e453-fdba-a700-c7d9-446fa5401204, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.048629] env[62109]: DEBUG nova.network.neutron [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Successfully updated port: 91a8c9ef-fce5-4834-b0a8-b3a07bf1c769 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 764.104063] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116312, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.106495] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Releasing lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.106495] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Updated the network info_cache for instance {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 764.106495] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 764.106495] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 764.106754] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 764.106788] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 764.106962] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 764.107099] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 764.107239] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 764.107404] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 764.159155] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e45a72-598a-b1d6-46d0-3b8fff7a08b8, 'name': SearchDatastore_Task, 'duration_secs': 0.010343} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.160488] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f862be9-fc5a-4389-9f75-4f213fb9c925 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.168505] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Waiting for the task: (returnval){ [ 764.168505] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528eab39-6d8f-45ed-200b-8f687584088a" [ 764.168505] env[62109]: _type = "Task" [ 764.168505] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.179163] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528eab39-6d8f-45ed-200b-8f687584088a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.316283] env[62109]: DEBUG nova.compute.manager [req-2b19245a-11dc-4f43-ad6a-60c059094ce5 req-558be3cc-5a9f-41e5-b06c-dfd63cf944ac service nova] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Received event network-vif-plugged-91a8c9ef-fce5-4834-b0a8-b3a07bf1c769 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 764.316661] env[62109]: DEBUG oslo_concurrency.lockutils [req-2b19245a-11dc-4f43-ad6a-60c059094ce5 req-558be3cc-5a9f-41e5-b06c-dfd63cf944ac service nova] Acquiring lock "c44d618e-c781-47ba-b191-cecc01dcfe9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.316920] env[62109]: DEBUG oslo_concurrency.lockutils [req-2b19245a-11dc-4f43-ad6a-60c059094ce5 req-558be3cc-5a9f-41e5-b06c-dfd63cf944ac service nova] Lock "c44d618e-c781-47ba-b191-cecc01dcfe9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.317158] env[62109]: DEBUG oslo_concurrency.lockutils [req-2b19245a-11dc-4f43-ad6a-60c059094ce5 req-558be3cc-5a9f-41e5-b06c-dfd63cf944ac service nova] Lock "c44d618e-c781-47ba-b191-cecc01dcfe9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.317381] env[62109]: DEBUG nova.compute.manager [req-2b19245a-11dc-4f43-ad6a-60c059094ce5 req-558be3cc-5a9f-41e5-b06c-dfd63cf944ac service nova] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] No waiting events found dispatching network-vif-plugged-91a8c9ef-fce5-4834-b0a8-b3a07bf1c769 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 764.317591] env[62109]: WARNING nova.compute.manager [req-2b19245a-11dc-4f43-ad6a-60c059094ce5 req-558be3cc-5a9f-41e5-b06c-dfd63cf944ac service nova] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Received unexpected event network-vif-plugged-91a8c9ef-fce5-4834-b0a8-b3a07bf1c769 for instance with vm_state building and task_state spawning. [ 764.324310] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116313, 'name': RemoveSnapshot_Task} progress is 15%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.426329] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5216e453-fdba-a700-c7d9-446fa5401204, 'name': SearchDatastore_Task, 'duration_secs': 0.017286} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.426329] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.426600] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 764.426735] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.555815] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "refresh_cache-c44d618e-c781-47ba-b191-cecc01dcfe9b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.556071] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired lock "refresh_cache-c44d618e-c781-47ba-b191-cecc01dcfe9b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.556266] env[62109]: DEBUG nova.network.neutron [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 764.594009] env[62109]: DEBUG oslo_vmware.api [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116312, 'name': RemoveSnapshot_Task, 'duration_secs': 0.736139} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.597359] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 764.597629] env[62109]: INFO nova.compute.manager [None req-a5c6e01e-f214-4744-ab08-7a4c87fd49df tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Took 16.12 seconds to snapshot the instance on the hypervisor. [ 764.612912] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.656345] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0250e976-9f4e-496d-a948-503ab9b9cb8d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.665840] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c045bb3b-fcfa-492d-b5ca-6e355924eb9c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.708881] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528eab39-6d8f-45ed-200b-8f687584088a, 'name': SearchDatastore_Task, 'duration_secs': 0.011039} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.709386] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.709655] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9/dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 764.710829] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c6e717-4967-410c-8b30-4b7f224ce9df {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.713206] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.713409] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.713606] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1648ea0b-e609-4159-8843-2ea83e5ac28e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.715805] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-762d9117-1d61-46ef-9cc2-18802228b4d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.724644] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf0cc73-541e-4467-b6b9-841d41647ecc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.729064] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Waiting for the task: (returnval){ [ 764.729064] env[62109]: value = "task-1116314" [ 764.729064] env[62109]: _type = "Task" [ 764.729064] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.730136] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.730311] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 764.734119] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a07c906e-f837-409d-8bb8-f5322a8ea4fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.744971] env[62109]: DEBUG nova.compute.provider_tree [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.751540] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116314, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.752838] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 764.752838] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52759c71-9564-9136-5d9f-6afca9b2b924" [ 764.752838] env[62109]: _type = "Task" [ 764.752838] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.762321] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52759c71-9564-9136-5d9f-6afca9b2b924, 'name': SearchDatastore_Task, 'duration_secs': 0.009587} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.763089] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a62d1637-8bd2-4a3a-8d38-bc4a00260154 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.768563] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 764.768563] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526a0b8d-6ed4-e1bd-0360-106fb2c737b7" [ 764.768563] env[62109]: _type = "Task" [ 764.768563] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.776985] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526a0b8d-6ed4-e1bd-0360-106fb2c737b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.796950] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "5d656f91-d35f-45e1-8892-7cdacd306960" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.797268] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5d656f91-d35f-45e1-8892-7cdacd306960" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.816394] env[62109]: DEBUG oslo_vmware.api [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116313, 'name': RemoveSnapshot_Task, 'duration_secs': 0.896909} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.816675] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 764.816944] env[62109]: INFO nova.compute.manager [None req-6edb73a0-9b03-4a69-9101-cb9a12beeca5 tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Took 14.80 seconds to snapshot the instance on the hypervisor. 
[ 764.859873] env[62109]: DEBUG nova.network.neutron [req-8d5830a2-c12c-4c5f-99fc-fa92c15040da req-994cfc61-f40d-46b2-8ddc-166501535c7c service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Updated VIF entry in instance network info cache for port 28469078-1559-4ee4-93a9-9165165a7b4c. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 764.860685] env[62109]: DEBUG nova.network.neutron [req-8d5830a2-c12c-4c5f-99fc-fa92c15040da req-994cfc61-f40d-46b2-8ddc-166501535c7c service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Updating instance_info_cache with network_info: [{"id": "28469078-1559-4ee4-93a9-9165165a7b4c", "address": "fa:16:3e:67:49:92", "network": {"id": "673cf969-ae87-4e20-a288-4363e937b462", "bridge": "br-int", "label": "tempest-ServersTestJSON-426005598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "49e293771a024d8d843ebb4c094722ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28469078-15", "ovs_interfaceid": "28469078-1559-4ee4-93a9-9165165a7b4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.087895] env[62109]: DEBUG nova.compute.manager [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Received event network-changed-7bfa3d99-6ca4-4382-a04a-d5ad176b7597 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 765.088523] env[62109]: DEBUG nova.compute.manager [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Refreshing instance network info cache due to event network-changed-7bfa3d99-6ca4-4382-a04a-d5ad176b7597. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 765.092856] env[62109]: DEBUG oslo_concurrency.lockutils [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] Acquiring lock "refresh_cache-c90ace77-5b8b-4b04-aa57-d47ad17df01e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.092856] env[62109]: DEBUG oslo_concurrency.lockutils [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] Acquired lock "refresh_cache-c90ace77-5b8b-4b04-aa57-d47ad17df01e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.092856] env[62109]: DEBUG nova.network.neutron [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Refreshing network info cache for port 7bfa3d99-6ca4-4382-a04a-d5ad176b7597 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 765.117230] env[62109]: DEBUG nova.network.neutron [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 765.245619] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116314, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490131} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.245933] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9/dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 765.246171] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 765.246560] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a5209a71-52b1-4848-b3b0-071b32feb998 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.250947] env[62109]: DEBUG nova.scheduler.client.report [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 765.261051] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Waiting for the task: (returnval){ [ 765.261051] env[62109]: value = "task-1116315" [ 765.261051] env[62109]: _type = "Task" [ 765.261051] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.277394] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116315, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.284068] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526a0b8d-6ed4-e1bd-0360-106fb2c737b7, 'name': SearchDatastore_Task, 'duration_secs': 0.016734} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.284364] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.284633] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] c90ace77-5b8b-4b04-aa57-d47ad17df01e/c90ace77-5b8b-4b04-aa57-d47ad17df01e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 765.284916] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ce3a1f7-b366-4c36-a44b-a34f68b58e34 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.292694] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 765.292694] env[62109]: value = "task-1116316" [ 765.292694] env[62109]: _type = "Task" [ 765.292694] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.302116] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116316, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.366321] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d5830a2-c12c-4c5f-99fc-fa92c15040da req-994cfc61-f40d-46b2-8ddc-166501535c7c service nova] Releasing lock "refresh_cache-8b6ec904-8c68-4eaa-94fe-47a87528e26b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.494783] env[62109]: DEBUG nova.network.neutron [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Updating instance_info_cache with network_info: [{"id": "91a8c9ef-fce5-4834-b0a8-b3a07bf1c769", "address": "fa:16:3e:16:0c:52", "network": {"id": "99854997-f910-4858-a446-ecc6781e679e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1633145076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5435b5d2a57a47a9a087b0f466ed33b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91a8c9ef-fc", "ovs_interfaceid": "91a8c9ef-fce5-4834-b0a8-b3a07bf1c769", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.755681] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.756866] env[62109]: DEBUG nova.compute.manager [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 765.762301] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.565s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.766496] env[62109]: INFO nova.compute.claims [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.783463] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116315, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072708} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.784926] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 765.785856] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e894954-50cf-4e7d-9c69-ec2ebabc16ac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.819470] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9/dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 765.822093] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02c5b7ea-2ac5-4400-9963-4a6aa40ecade {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.845707] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116316, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454122} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.846758] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] c90ace77-5b8b-4b04-aa57-d47ad17df01e/c90ace77-5b8b-4b04-aa57-d47ad17df01e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 765.846959] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 765.847387] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c46db68-65d0-4f47-97f9-47081bfc17dd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.853690] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Waiting for the task: (returnval){ [ 765.853690] env[62109]: value = "task-1116317" [ 765.853690] env[62109]: _type = "Task" [ 765.853690] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.859538] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 765.859538] env[62109]: value = "task-1116318" [ 765.859538] env[62109]: _type = "Task" [ 765.859538] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.870351] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116317, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.880494] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116318, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.919609] env[62109]: DEBUG nova.network.neutron [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Updated VIF entry in instance network info cache for port 7bfa3d99-6ca4-4382-a04a-d5ad176b7597. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 765.921132] env[62109]: DEBUG nova.network.neutron [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Updating instance_info_cache with network_info: [{"id": "7bfa3d99-6ca4-4382-a04a-d5ad176b7597", "address": "fa:16:3e:7c:d7:68", "network": {"id": "99854997-f910-4858-a446-ecc6781e679e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1633145076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5435b5d2a57a47a9a087b0f466ed33b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bfa3d99-6c", "ovs_interfaceid": "7bfa3d99-6ca4-4382-a04a-d5ad176b7597", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.998154] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Releasing lock "refresh_cache-c44d618e-c781-47ba-b191-cecc01dcfe9b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.998532] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Instance network_info: |[{"id": "91a8c9ef-fce5-4834-b0a8-b3a07bf1c769", "address": "fa:16:3e:16:0c:52", "network": {"id": "99854997-f910-4858-a446-ecc6781e679e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1633145076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5435b5d2a57a47a9a087b0f466ed33b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91a8c9ef-fc", "ovs_interfaceid": "91a8c9ef-fce5-4834-b0a8-b3a07bf1c769", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 765.999295] 
env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:0c:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f85835c8-5d0c-4b2f-97c4-6c4006580f79', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91a8c9ef-fce5-4834-b0a8-b3a07bf1c769', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 766.008472] env[62109]: DEBUG oslo.service.loopingcall [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 766.008472] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 766.008472] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f83b91a-3f68-41ee-acd5-55017737e68b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.029914] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 766.029914] env[62109]: value = "task-1116319" [ 766.029914] env[62109]: _type = "Task" [ 766.029914] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.039501] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116319, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.270726] env[62109]: DEBUG nova.compute.utils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 766.272369] env[62109]: DEBUG nova.compute.manager [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 766.273993] env[62109]: DEBUG nova.network.neutron [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 766.330824] env[62109]: DEBUG nova.policy [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7f1cb3871cdd4fad855d79a8d81757f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b58b418b5812479da127b7d697728a98', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 766.348009] env[62109]: DEBUG oslo_concurrency.lockutils [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Acquiring lock "f6d3a50c-bcc3-4a6f-969f-4e629646f427" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.348440] env[62109]: DEBUG oslo_concurrency.lockutils [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lock "f6d3a50c-bcc3-4a6f-969f-4e629646f427" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.348669] env[62109]: DEBUG oslo_concurrency.lockutils [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Acquiring lock "f6d3a50c-bcc3-4a6f-969f-4e629646f427-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.348859] env[62109]: DEBUG oslo_concurrency.lockutils [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lock "f6d3a50c-bcc3-4a6f-969f-4e629646f427-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.349040] env[62109]: DEBUG oslo_concurrency.lockutils [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lock "f6d3a50c-bcc3-4a6f-969f-4e629646f427-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} 
[ 766.353183] env[62109]: INFO nova.compute.manager [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Terminating instance [ 766.360371] env[62109]: DEBUG nova.compute.manager [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 766.360371] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 766.363769] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c927bc7-2104-4b72-b891-1f31dbdcd2d1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.375475] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Acquiring lock "8584eb2c-57a3-455e-9d3c-877286e23ccc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.376053] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Lock "8584eb2c-57a3-455e-9d3c-877286e23ccc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.376322] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Acquiring lock "8584eb2c-57a3-455e-9d3c-877286e23ccc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.376570] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Lock "8584eb2c-57a3-455e-9d3c-877286e23ccc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.376910] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Lock "8584eb2c-57a3-455e-9d3c-877286e23ccc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.378772] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116317, 'name': ReconfigVM_Task, 'duration_secs': 0.302479} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.381550] env[62109]: INFO nova.compute.manager [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Terminating instance [ 766.383202] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Reconfigured VM instance instance-0000002d to attach disk [datastore2] dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9/dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 766.383859] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 766.384530] env[62109]: DEBUG nova.compute.manager [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 766.384765] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 766.384950] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5174e792-727f-4498-92e5-037e3212b56c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.388940] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cba0d0c6-9d63-4b4e-810a-8e1e97a0d1e3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.392360] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbeb52d-d179-4c1d-9436-521717b73d1b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.398699] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116318, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072938} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.400253] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 766.401113] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6235cf57-a11a-4e69-bb51-76b57dcc79df {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.411122] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 766.411122] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Waiting for the task: (returnval){ [ 766.411122] env[62109]: value = "task-1116320" [ 766.411122] env[62109]: _type = "Task" [ 766.411122] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.411122] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82d1e63d-e85c-479e-89da-9285e64d572e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.413070] env[62109]: DEBUG oslo_vmware.api [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 766.413070] env[62109]: value = "task-1116321" [ 766.413070] env[62109]: _type = "Task" [ 766.413070] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.437588] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] c90ace77-5b8b-4b04-aa57-d47ad17df01e/c90ace77-5b8b-4b04-aa57-d47ad17df01e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 766.439162] env[62109]: DEBUG oslo_concurrency.lockutils [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] Releasing lock "refresh_cache-c90ace77-5b8b-4b04-aa57-d47ad17df01e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.439456] env[62109]: DEBUG nova.compute.manager [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Received event network-changed-91a8c9ef-fce5-4834-b0a8-b3a07bf1c769 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 766.439627] env[62109]: DEBUG nova.compute.manager [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Refreshing instance network info cache due to event network-changed-91a8c9ef-fce5-4834-b0a8-b3a07bf1c769. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 766.439837] env[62109]: DEBUG oslo_concurrency.lockutils [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] Acquiring lock "refresh_cache-c44d618e-c781-47ba-b191-cecc01dcfe9b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.439995] env[62109]: DEBUG oslo_concurrency.lockutils [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] Acquired lock "refresh_cache-c44d618e-c781-47ba-b191-cecc01dcfe9b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.440193] env[62109]: DEBUG nova.network.neutron [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Refreshing network info cache for port 91a8c9ef-fce5-4834-b0a8-b3a07bf1c769 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 766.445811] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55a6f7c2-354b-40d4-90fb-c0d970ea4cb2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.469362] env[62109]: DEBUG oslo_vmware.api [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Waiting for the task: (returnval){ [ 766.469362] env[62109]: value = "task-1116322" [ 766.469362] env[62109]: _type = "Task" [ 766.469362] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.482171] env[62109]: DEBUG oslo_vmware.api [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116321, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.482638] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116320, 'name': Rename_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.484475] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 766.484475] env[62109]: value = "task-1116323" [ 766.484475] env[62109]: _type = "Task" [ 766.484475] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.492359] env[62109]: DEBUG oslo_vmware.api [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116322, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.498167] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.541966] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116319, 'name': CreateVM_Task, 'duration_secs': 0.445409} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.542273] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 766.542959] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.543136] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.543607] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 766.543924] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2232a38-acac-4f19-b649-96a84628e87a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.553246] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 766.553246] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52824a75-e7f7-c827-be44-743a890fd096" [ 766.553246] env[62109]: _type = "Task" [ 766.553246] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.564288] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52824a75-e7f7-c827-be44-743a890fd096, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.717990] env[62109]: DEBUG nova.network.neutron [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Updated VIF entry in instance network info cache for port 91a8c9ef-fce5-4834-b0a8-b3a07bf1c769. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 766.718501] env[62109]: DEBUG nova.network.neutron [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Updating instance_info_cache with network_info: [{"id": "91a8c9ef-fce5-4834-b0a8-b3a07bf1c769", "address": "fa:16:3e:16:0c:52", "network": {"id": "99854997-f910-4858-a446-ecc6781e679e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1633145076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5435b5d2a57a47a9a087b0f466ed33b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91a8c9ef-fc", "ovs_interfaceid": "91a8c9ef-fce5-4834-b0a8-b3a07bf1c769", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.777575] env[62109]: DEBUG nova.compute.manager [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 766.834177] env[62109]: DEBUG nova.network.neutron [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Successfully created port: e350a1ec-a026-4ac9-80fd-14259052b3b2 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 766.929609] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116320, 'name': Rename_Task, 'duration_secs': 0.2268} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.933567] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 766.933898] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e5ec09e-c827-4ed6-81f8-1600dfcd0104 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.944143] env[62109]: DEBUG oslo_vmware.api [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116321, 'name': PowerOffVM_Task, 'duration_secs': 0.232758} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.944387] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 766.944725] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 766.945926] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-502c33a7-004c-401f-91a6-676b32e2ca7b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.947558] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Waiting for the task: (returnval){ [ 766.947558] env[62109]: value = "task-1116324" [ 766.947558] env[62109]: _type = "Task" [ 766.947558] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.959969] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116324, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.984315] env[62109]: DEBUG oslo_vmware.api [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116322, 'name': PowerOffVM_Task, 'duration_secs': 0.279185} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.984790] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 766.988021] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 766.988021] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61c5f6e2-f0bf-4e65-810a-571a2fdaae16 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.001803] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116323, 'name': ReconfigVM_Task, 'duration_secs': 0.358345} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.002291] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Reconfigured VM instance instance-0000002e to attach disk [datastore2] c90ace77-5b8b-4b04-aa57-d47ad17df01e/c90ace77-5b8b-4b04-aa57-d47ad17df01e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 767.003386] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d40ea62-5166-4f14-b5f8-9fa62cec2eda {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.014227] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 767.014227] env[62109]: value = "task-1116327" [ 767.014227] env[62109]: _type = "Task" [ 767.014227] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.015910] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 767.016305] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 767.016607] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Deleting the datastore file [datastore2] f6d3a50c-bcc3-4a6f-969f-4e629646f427 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 767.022108] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-566737df-50e3-4882-aaac-e062d02a5a87 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.031714] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116327, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.032307] env[62109]: DEBUG oslo_vmware.api [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for the task: (returnval){ [ 767.032307] env[62109]: value = "task-1116328" [ 767.032307] env[62109]: _type = "Task" [ 767.032307] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.048393] env[62109]: DEBUG oslo_vmware.api [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116328, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.067242] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52824a75-e7f7-c827-be44-743a890fd096, 'name': SearchDatastore_Task, 'duration_secs': 0.018748} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.067597] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.067895] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 767.068160] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.068253] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.068402] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 767.071182] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb2634c5-97ef-44ec-8256-b97c0c4f5fcf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.074510] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 767.074798] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 767.074985] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Deleting the datastore file [datastore2] 8584eb2c-57a3-455e-9d3c-877286e23ccc {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} 
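(Aside: the interleaved "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully" entries above all come from the same polling loop that oslo.vmware wraps around vSphere tasks such as ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task and SearchDatastore_Task. The sketch below illustrates that pattern only; it is not the actual oslo.vmware implementation, and get_task_info() and POLL_INTERVAL are hypothetical stand-ins.)

# Illustrative polling loop, assuming a get_task_info() callable that reads the
# task state from the vCenter PropertyCollector. Hypothetical, for orientation only.
import time

POLL_INTERVAL = 0.5  # hypothetical seconds between polls

def wait_for_task(session, task_ref, get_task_info):
    """Block until the vSphere task finishes, logging progress as it goes."""
    print(f"Waiting for the task: {task_ref} to complete.")
    while True:
        info = get_task_info(session, task_ref)  # e.g. one PropertyCollector read
        if info["state"] == "running":
            print(f"Task: {task_ref} progress is {info.get('progress', 0)}%.")
        elif info["state"] == "success":
            print(f"Task: {task_ref}, duration_secs: {info['duration']} completed successfully.")
            return info
        else:  # 'error' or 'cancelled'
            raise RuntimeError(f"Task {task_ref} failed: {info.get('error')}")
        time.sleep(POLL_INTERVAL)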
[ 767.076603] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93e80888-1fdd-4847-90db-37b1f3990aee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.084747] env[62109]: DEBUG oslo_vmware.api [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Waiting for the task: (returnval){ [ 767.084747] env[62109]: value = "task-1116329" [ 767.084747] env[62109]: _type = "Task" [ 767.084747] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.085316] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 767.085488] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 767.089347] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-429bf05e-d059-48e1-96a1-bf4daa02419f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.102165] env[62109]: DEBUG oslo_vmware.api [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116329, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.103606] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 767.103606] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5244e5a1-b9ff-c4a9-d235-7dae7e5431f2" [ 767.103606] env[62109]: _type = "Task" [ 767.103606] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.117276] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5244e5a1-b9ff-c4a9-d235-7dae7e5431f2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.221834] env[62109]: DEBUG oslo_concurrency.lockutils [req-1e4c5645-d368-45d4-812b-dfcd5bbaeaa9 req-a6d5dd8e-11cc-46c4-bb6c-48e9fa7e9477 service nova] Releasing lock "refresh_cache-c44d618e-c781-47ba-b191-cecc01dcfe9b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.247028] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a7ddb9-65d5-45d4-be09-4cfd67884d53 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.257279] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2cfa02-5e73-400a-8d00-8f943ecc9894 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.302049] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6eb376-03f2-4baf-9076-5b72c827287f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.310041] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a176f001-adc1-4aaa-ac54-3a983db1f6d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.326068] env[62109]: DEBUG nova.compute.provider_tree [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.462812] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116324, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.528951] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116327, 'name': Rename_Task, 'duration_secs': 0.192286} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.529449] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 767.529814] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-963731b5-6243-419a-8fe2-16ee892e3a66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.539644] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 767.539644] env[62109]: value = "task-1116330" [ 767.539644] env[62109]: _type = "Task" [ 767.539644] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.548949] env[62109]: DEBUG oslo_vmware.api [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Task: {'id': task-1116328, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201963} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.549800] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 767.550135] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 767.550443] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 767.550741] env[62109]: INFO nova.compute.manager [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Took 1.19 seconds to destroy the instance on the hypervisor. [ 767.551126] env[62109]: DEBUG oslo.service.loopingcall [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.551440] env[62109]: DEBUG nova.compute.manager [-] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 767.551607] env[62109]: DEBUG nova.network.neutron [-] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 767.558060] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116330, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.597152] env[62109]: DEBUG oslo_vmware.api [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Task: {'id': task-1116329, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192388} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.598201] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 767.598201] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 767.598201] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 767.598201] env[62109]: INFO nova.compute.manager [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Took 1.21 seconds to destroy the instance on the hypervisor. [ 767.598201] env[62109]: DEBUG oslo.service.loopingcall [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.598438] env[62109]: DEBUG nova.compute.manager [-] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 767.598478] env[62109]: DEBUG nova.network.neutron [-] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 767.615715] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5244e5a1-b9ff-c4a9-d235-7dae7e5431f2, 'name': SearchDatastore_Task, 'duration_secs': 0.023438} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.616311] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-237c77d8-904d-43e2-8dcc-78439688b668 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.622802] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 767.622802] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52adb202-263a-0290-3545-792135264baf" [ 767.622802] env[62109]: _type = "Task" [ 767.622802] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.632153] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52adb202-263a-0290-3545-792135264baf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.668365] env[62109]: DEBUG nova.compute.manager [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 767.669313] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2056b4-f3d0-4430-8cea-ae89b3c9b467 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.804707] env[62109]: DEBUG nova.compute.manager [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 767.829685] env[62109]: DEBUG nova.scheduler.client.report [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 767.834804] env[62109]: DEBUG nova.virt.hardware [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 767.836132] env[62109]: DEBUG nova.virt.hardware [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 767.836132] env[62109]: DEBUG nova.virt.hardware [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.836132] env[62109]: DEBUG nova.virt.hardware [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 767.836132] env[62109]: DEBUG nova.virt.hardware [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.836132] env[62109]: DEBUG nova.virt.hardware [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 767.836487] env[62109]: DEBUG nova.virt.hardware [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 767.836487] env[62109]: DEBUG nova.virt.hardware [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 767.836487] env[62109]: DEBUG nova.virt.hardware [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 767.836649] env[62109]: DEBUG nova.virt.hardware [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 767.836798] env[62109]: DEBUG nova.virt.hardware [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 767.837777] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1585a9da-3915-4c79-8a89-b371728ce83d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.848646] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-419910e2-89ee-4f61-853f-b41438a8ce87 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.962986] env[62109]: DEBUG oslo_vmware.api [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116324, 'name': PowerOnVM_Task, 'duration_secs': 0.804246} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.962986] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 767.962986] env[62109]: INFO nova.compute.manager [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Took 11.87 seconds to spawn the instance on the hypervisor. [ 767.963602] env[62109]: DEBUG nova.compute.manager [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 767.964509] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40f8ac0-c51c-4617-9ed1-a92afab5efea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.051351] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116330, 'name': PowerOnVM_Task} progress is 90%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.111063] env[62109]: DEBUG nova.compute.manager [req-2f9a2f12-e6cb-4501-826e-9397d2b8f27e req-4b79b87d-2303-4650-8ad9-75bb007200eb service nova] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Received event network-vif-deleted-024ef821-8029-4165-92f1-25cab3da46ce {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 768.111350] env[62109]: INFO nova.compute.manager [req-2f9a2f12-e6cb-4501-826e-9397d2b8f27e req-4b79b87d-2303-4650-8ad9-75bb007200eb service nova] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Neutron deleted interface 024ef821-8029-4165-92f1-25cab3da46ce; detaching it from the instance and deleting it from the info cache [ 768.111569] env[62109]: DEBUG nova.network.neutron [req-2f9a2f12-e6cb-4501-826e-9397d2b8f27e req-4b79b87d-2303-4650-8ad9-75bb007200eb service nova] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.137401] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52adb202-263a-0290-3545-792135264baf, 'name': SearchDatastore_Task, 'duration_secs': 0.025435} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.137401] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.137561] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] c44d618e-c781-47ba-b191-cecc01dcfe9b/c44d618e-c781-47ba-b191-cecc01dcfe9b.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 768.139648] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95ce6ae1-d4bc-4dd0-a621-c93122f15b99 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.148571] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 768.148571] env[62109]: value = "task-1116331" [ 768.148571] env[62109]: _type = "Task" [ 768.148571] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.163403] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116331, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.181508] env[62109]: INFO nova.compute.manager [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] instance snapshotting [ 768.184500] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42164009-b576-48bb-918e-dd509f20dbe2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.204058] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcf3260-1c12-4870-bf82-1fb6d0c0749e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.342792] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.343289] env[62109]: DEBUG nova.compute.manager [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 768.346335] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.210s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.347805] env[62109]: INFO nova.compute.claims [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.489760] env[62109]: DEBUG nova.network.neutron [-] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.491129] env[62109]: INFO nova.compute.manager [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Took 37.17 seconds to build instance. [ 768.505188] env[62109]: DEBUG nova.network.neutron [-] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.552123] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116330, 'name': PowerOnVM_Task, 'duration_secs': 0.651197} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.552829] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 768.553307] env[62109]: INFO nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Took 7.85 seconds to spawn the instance on the hypervisor. [ 768.553695] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 768.554652] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c715bf-d767-4aaa-964c-43c561a0c68b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.614713] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c4b7f090-079f-4b8a-b0c1-7ea3f7d2999e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.627351] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6257b875-7898-494d-be49-5d2ebf505a85 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.640665] env[62109]: DEBUG nova.compute.manager [req-844ef554-1af1-4c76-b77b-5f91328182a9 req-cd752a5b-5afa-4e76-aff9-9816b558a2f4 service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Received event network-vif-plugged-e350a1ec-a026-4ac9-80fd-14259052b3b2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 768.641016] env[62109]: DEBUG oslo_concurrency.lockutils [req-844ef554-1af1-4c76-b77b-5f91328182a9 req-cd752a5b-5afa-4e76-aff9-9816b558a2f4 service nova] Acquiring lock "7f40cdc8-3421-47b7-b148-ff6417105dbb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.641319] env[62109]: DEBUG oslo_concurrency.lockutils [req-844ef554-1af1-4c76-b77b-5f91328182a9 req-cd752a5b-5afa-4e76-aff9-9816b558a2f4 service nova] Lock "7f40cdc8-3421-47b7-b148-ff6417105dbb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.641525] env[62109]: DEBUG oslo_concurrency.lockutils [req-844ef554-1af1-4c76-b77b-5f91328182a9 req-cd752a5b-5afa-4e76-aff9-9816b558a2f4 service nova] Lock "7f40cdc8-3421-47b7-b148-ff6417105dbb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.641753] env[62109]: DEBUG nova.compute.manager 
[req-844ef554-1af1-4c76-b77b-5f91328182a9 req-cd752a5b-5afa-4e76-aff9-9816b558a2f4 service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] No waiting events found dispatching network-vif-plugged-e350a1ec-a026-4ac9-80fd-14259052b3b2 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 768.642056] env[62109]: WARNING nova.compute.manager [req-844ef554-1af1-4c76-b77b-5f91328182a9 req-cd752a5b-5afa-4e76-aff9-9816b558a2f4 service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Received unexpected event network-vif-plugged-e350a1ec-a026-4ac9-80fd-14259052b3b2 for instance with vm_state building and task_state spawning. [ 768.670357] env[62109]: DEBUG nova.compute.manager [req-2f9a2f12-e6cb-4501-826e-9397d2b8f27e req-4b79b87d-2303-4650-8ad9-75bb007200eb service nova] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Detach interface failed, port_id=024ef821-8029-4165-92f1-25cab3da46ce, reason: Instance f6d3a50c-bcc3-4a6f-969f-4e629646f427 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 768.670948] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116331, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.715247] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 768.715640] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-acddca87-0b24-4cb2-a799-8e2ed6cbca3b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.725362] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 768.725362] env[62109]: value = "task-1116332" [ 768.725362] env[62109]: _type = "Task" [ 768.725362] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.734537] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116332, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.770030] env[62109]: DEBUG nova.network.neutron [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Successfully updated port: e350a1ec-a026-4ac9-80fd-14259052b3b2 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 768.853367] env[62109]: DEBUG nova.compute.utils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 768.858638] env[62109]: DEBUG nova.compute.manager [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 768.858971] env[62109]: DEBUG nova.network.neutron [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 768.903755] env[62109]: DEBUG nova.policy [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5442deec924240babb834fc704d53cd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a363548894df47d5981199004e9884de', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 768.992746] env[62109]: INFO nova.compute.manager [-] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Took 1.44 seconds to deallocate network for instance. [ 768.994472] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b836e9fc-2c71-44cc-9e5a-ddaac581357e tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lock "dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.218s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.007027] env[62109]: INFO nova.compute.manager [-] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Took 1.41 seconds to deallocate network for instance. [ 769.073293] env[62109]: INFO nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Took 34.49 seconds to build instance. 
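The CreateSnapshot_Task entries above (task-1116332, polled via wait_for_task until the "progress is N%" lines give way to "completed successfully") follow the usual oslo.vmware pattern: invoke a vSphere method that returns a task managed-object reference, then let the session poll that task to completion. A minimal sketch of that pattern against an existing VMwareAPISession follows; the vm_ref argument and the snapshot parameters are illustrative assumptions, not values taken from this log.

    # Minimal sketch (not nova's actual driver code) of the invoke-then-wait
    # pattern behind the CreateSnapshot_Task / wait_for_task entries above.
    # `session` is assumed to be an existing oslo_vmware.api.VMwareAPISession
    # and `vm_ref` a VirtualMachine managed-object reference obtained
    # elsewhere; the snapshot arguments are illustrative.
    def snapshot_vm(session, vm_ref, name="nova-snapshot"):
        # invoke_api() issues the SOAP call (the opID=oslo.vmware-... requests
        # in the log) and returns a Task managed-object reference.
        task_ref = session.invoke_api(
            session.vim, "CreateSnapshot_Task", vm_ref,
            name=name, description="", memory=False, quiesce=False)
        # wait_for_task() polls TaskInfo until the task succeeds, producing
        # the "Task: {...} progress is N%" DEBUG lines and returning the
        # final task info once the task completes.
        return session.wait_for_task(task_ref)
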
[ 769.167509] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116331, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.805453} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.170200] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] c44d618e-c781-47ba-b191-cecc01dcfe9b/c44d618e-c781-47ba-b191-cecc01dcfe9b.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 769.170200] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 769.170441] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-23d04aa1-881f-47a5-82b6-fa1f70a6d062 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.179595] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 769.179595] env[62109]: value = "task-1116333" [ 769.179595] env[62109]: _type = "Task" [ 769.179595] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.190497] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116333, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.236805] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116332, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.273678] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Acquiring lock "refresh_cache-7f40cdc8-3421-47b7-b148-ff6417105dbb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.273823] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Acquired lock "refresh_cache-7f40cdc8-3421-47b7-b148-ff6417105dbb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.273969] env[62109]: DEBUG nova.network.neutron [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 769.305106] env[62109]: DEBUG nova.network.neutron [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Successfully created port: 7c4891b0-c525-4571-aa3b-47cc9a42d8ac {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 769.366655] env[62109]: DEBUG nova.compute.manager [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 769.503072] env[62109]: DEBUG oslo_concurrency.lockutils [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.506525] env[62109]: DEBUG nova.compute.manager [None req-319a15b7-6a37-4fc4-a6fb-c8c63fe28fe6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 18a86082-f234-44ab-81e1-b215f284385f] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 769.516841] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.575538] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "c90ace77-5b8b-4b04-aa57-d47ad17df01e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 134.516s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.697246] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116333, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067179} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.697409] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 769.698494] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ae0610-10c7-4ac8-8ff5-5aba7ae3a408 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.731144] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] c44d618e-c781-47ba-b191-cecc01dcfe9b/c44d618e-c781-47ba-b191-cecc01dcfe9b.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 769.734174] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-168b413e-38ec-45f7-a806-a1277129b5b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.759121] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116332, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.760490] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 769.760490] env[62109]: value = "task-1116334" [ 769.760490] env[62109]: _type = "Task" [ 769.760490] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.772299] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116334, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.806144] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2548c91d-e1bd-4e1a-bf04-8d6cc3231350 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.809991] env[62109]: DEBUG nova.network.neutron [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 769.815123] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6a943b-c942-4a1d-b294-c51d03548288 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.849602] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7377cf5d-e9f6-4843-9050-d9f905d05264 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.859950] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a562719d-7dec-4178-ae38-df6c41098eec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.877814] env[62109]: DEBUG nova.compute.provider_tree [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.927091] env[62109]: DEBUG nova.compute.manager [None req-1e780e11-d9c6-4425-b343-b085962306a1 tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 769.929075] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6c2c56-2824-45b2-9c71-55e8e1423e33 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.990030] env[62109]: DEBUG nova.network.neutron [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 
tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Updating instance_info_cache with network_info: [{"id": "e350a1ec-a026-4ac9-80fd-14259052b3b2", "address": "fa:16:3e:a7:e1:a4", "network": {"id": "18dd65d1-24ed-4b8e-b0a2-49069a00b5bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-194877863-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b58b418b5812479da127b7d697728a98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape350a1ec-a0", "ovs_interfaceid": "e350a1ec-a026-4ac9-80fd-14259052b3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.010696] env[62109]: DEBUG nova.compute.manager [None req-319a15b7-6a37-4fc4-a6fb-c8c63fe28fe6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 18a86082-f234-44ab-81e1-b215f284385f] Instance disappeared before build. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 770.078940] env[62109]: DEBUG nova.compute.manager [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 770.134924] env[62109]: DEBUG nova.compute.manager [req-5abfd5eb-a947-4479-9b13-73d6efbe2b47 req-f57626f7-0478-4ca8-b2e7-323c570bf849 service nova] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Received event network-vif-deleted-374c820b-ad09-4e55-88f1-e117b8123aeb {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 770.205168] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Acquiring lock "dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.205387] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lock "dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.205681] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Acquiring lock "dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.205881] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lock "dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.206117] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lock "dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.208429] env[62109]: INFO nova.compute.manager [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Terminating instance [ 770.209874] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Acquiring lock "refresh_cache-dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.210043] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 
tempest-ServersAaction247Test-2105068673-project-member] Acquired lock "refresh_cache-dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.210220] env[62109]: DEBUG nova.network.neutron [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 770.243966] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116332, 'name': CreateSnapshot_Task, 'duration_secs': 1.12673} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.244250] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 770.245039] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c9981c-7368-4734-81b9-94db2d221079 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.275431] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116334, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.380585] env[62109]: DEBUG nova.compute.manager [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 770.384886] env[62109]: DEBUG nova.scheduler.client.report [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 770.408434] env[62109]: DEBUG nova.virt.hardware [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 770.408747] env[62109]: DEBUG nova.virt.hardware [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 770.408996] env[62109]: DEBUG nova.virt.hardware [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.409163] env[62109]: DEBUG nova.virt.hardware [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 770.409356] env[62109]: DEBUG nova.virt.hardware [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.409549] env[62109]: DEBUG nova.virt.hardware [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 770.409809] env[62109]: DEBUG 
nova.virt.hardware [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 770.410027] env[62109]: DEBUG nova.virt.hardware [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 770.410245] env[62109]: DEBUG nova.virt.hardware [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 770.410438] env[62109]: DEBUG nova.virt.hardware [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 770.410656] env[62109]: DEBUG nova.virt.hardware [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 770.411939] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dca6f42-c947-41da-86ed-03c1c3ddf5d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.422021] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42b2cbb-0231-4438-9227-79bbb42b905d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.441019] env[62109]: INFO nova.compute.manager [None req-1e780e11-d9c6-4425-b343-b085962306a1 tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] instance snapshotting [ 770.441608] env[62109]: DEBUG nova.objects.instance [None req-1e780e11-d9c6-4425-b343-b085962306a1 tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lazy-loading 'flavor' on Instance uuid dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 770.493732] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Releasing lock "refresh_cache-7f40cdc8-3421-47b7-b148-ff6417105dbb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.494148] env[62109]: DEBUG nova.compute.manager [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 
7f40cdc8-3421-47b7-b148-ff6417105dbb] Instance network_info: |[{"id": "e350a1ec-a026-4ac9-80fd-14259052b3b2", "address": "fa:16:3e:a7:e1:a4", "network": {"id": "18dd65d1-24ed-4b8e-b0a2-49069a00b5bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-194877863-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b58b418b5812479da127b7d697728a98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape350a1ec-a0", "ovs_interfaceid": "e350a1ec-a026-4ac9-80fd-14259052b3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 770.494618] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:e1:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '26472e27-9835-4f87-ab7f-ca24dfee4e83', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e350a1ec-a026-4ac9-80fd-14259052b3b2', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 770.502920] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Creating folder: Project (b58b418b5812479da127b7d697728a98). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 770.502920] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7db2721-c649-4062-84e4-678422a922d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.520658] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Created folder: Project (b58b418b5812479da127b7d697728a98) in parent group-v244329. [ 770.520900] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Creating folder: Instances. Parent ref: group-v244399. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 770.521208] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fdee2b52-41bc-44f5-a2b6-b7ce65604df7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.526248] env[62109]: DEBUG oslo_concurrency.lockutils [None req-319a15b7-6a37-4fc4-a6fb-c8c63fe28fe6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "18a86082-f234-44ab-81e1-b215f284385f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.060s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.533853] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Created folder: Instances in parent group-v244399. [ 770.534187] env[62109]: DEBUG oslo.service.loopingcall [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 770.534405] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 770.534624] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8db2918e-3368-41bf-80a8-b80d316d26ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.558163] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 770.558163] env[62109]: value = "task-1116337" [ 770.558163] env[62109]: _type = "Task" [ 770.558163] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.567154] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116337, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.603506] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.659856] env[62109]: DEBUG nova.compute.manager [req-1a71a2d3-5e1b-4212-9866-71ec2fd369fc req-5f4904ee-c9dd-4ffe-8557-f145546422fa service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Received event network-changed-e350a1ec-a026-4ac9-80fd-14259052b3b2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 770.660098] env[62109]: DEBUG nova.compute.manager [req-1a71a2d3-5e1b-4212-9866-71ec2fd369fc req-5f4904ee-c9dd-4ffe-8557-f145546422fa service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Refreshing instance network info cache due to event network-changed-e350a1ec-a026-4ac9-80fd-14259052b3b2. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 770.660326] env[62109]: DEBUG oslo_concurrency.lockutils [req-1a71a2d3-5e1b-4212-9866-71ec2fd369fc req-5f4904ee-c9dd-4ffe-8557-f145546422fa service nova] Acquiring lock "refresh_cache-7f40cdc8-3421-47b7-b148-ff6417105dbb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.660481] env[62109]: DEBUG oslo_concurrency.lockutils [req-1a71a2d3-5e1b-4212-9866-71ec2fd369fc req-5f4904ee-c9dd-4ffe-8557-f145546422fa service nova] Acquired lock "refresh_cache-7f40cdc8-3421-47b7-b148-ff6417105dbb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.660626] env[62109]: DEBUG nova.network.neutron [req-1a71a2d3-5e1b-4212-9866-71ec2fd369fc req-5f4904ee-c9dd-4ffe-8557-f145546422fa service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Refreshing network info cache for port e350a1ec-a026-4ac9-80fd-14259052b3b2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 770.745944] env[62109]: DEBUG nova.network.neutron [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 770.764713] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 770.768036] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-083dde07-07ef-48d8-a861-53dba51d7b93 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.786019] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116334, 'name': ReconfigVM_Task, 'duration_secs': 0.726986} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.786019] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Reconfigured VM instance instance-0000002f to attach disk [datastore2] c44d618e-c781-47ba-b191-cecc01dcfe9b/c44d618e-c781-47ba-b191-cecc01dcfe9b.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 770.786019] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 770.786019] env[62109]: value = "task-1116338" [ 770.786019] env[62109]: _type = "Task" [ 770.786019] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.787141] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f3cd3f8b-3f53-4773-9896-e6a2276c7fbe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.799885] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116338, 'name': CloneVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.802289] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 770.802289] env[62109]: value = "task-1116339" [ 770.802289] env[62109]: _type = "Task" [ 770.802289] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.812558] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116339, 'name': Rename_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.830798] env[62109]: DEBUG nova.network.neutron [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.895113] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.549s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.897225] env[62109]: DEBUG nova.compute.manager [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 770.903314] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.285s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.905156] env[62109]: INFO nova.compute.claims [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 770.947908] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5303ff7d-7d94-4762-968b-aed84ccf62d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.976302] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0cd4daf-db72-442e-938b-a6f92a6a435a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.031830] env[62109]: DEBUG nova.compute.manager [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 771.071696] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116337, 'name': CreateVM_Task, 'duration_secs': 0.379083} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.071696] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 771.072753] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.073572] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.073572] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 771.074083] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abb24f80-0562-4e8b-8b53-adcc7fa7a973 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.081047] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Waiting for the task: (returnval){ [ 771.081047] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d3fe85-b90a-ec90-8625-52f2286bd351" [ 771.081047] env[62109]: _type = "Task" [ 771.081047] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.091272] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d3fe85-b90a-ec90-8625-52f2286bd351, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.177198] env[62109]: DEBUG nova.network.neutron [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Successfully updated port: 7c4891b0-c525-4571-aa3b-47cc9a42d8ac {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 771.301510] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116338, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.317882] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116339, 'name': Rename_Task, 'duration_secs': 0.187364} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.318175] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 771.318469] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74b56b99-c63d-40c6-bc61-f854098efca7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.328167] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 771.328167] env[62109]: value = "task-1116340" [ 771.328167] env[62109]: _type = "Task" [ 771.328167] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.333903] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Releasing lock "refresh_cache-dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.334409] env[62109]: DEBUG nova.compute.manager [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 771.334644] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 771.338837] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4244a801-996a-48f4-9330-ec953c8ee77a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.341901] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116340, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.348157] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 771.348310] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db5a2377-64e0-408e-80d9-0e8aff40bbd7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.355542] env[62109]: DEBUG oslo_vmware.api [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Waiting for the task: (returnval){ [ 771.355542] env[62109]: value = "task-1116341" [ 771.355542] env[62109]: _type = "Task" [ 771.355542] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.366770] env[62109]: DEBUG oslo_vmware.api [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116341, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.389043] env[62109]: DEBUG nova.network.neutron [req-1a71a2d3-5e1b-4212-9866-71ec2fd369fc req-5f4904ee-c9dd-4ffe-8557-f145546422fa service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Updated VIF entry in instance network info cache for port e350a1ec-a026-4ac9-80fd-14259052b3b2. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 771.389043] env[62109]: DEBUG nova.network.neutron [req-1a71a2d3-5e1b-4212-9866-71ec2fd369fc req-5f4904ee-c9dd-4ffe-8557-f145546422fa service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Updating instance_info_cache with network_info: [{"id": "e350a1ec-a026-4ac9-80fd-14259052b3b2", "address": "fa:16:3e:a7:e1:a4", "network": {"id": "18dd65d1-24ed-4b8e-b0a2-49069a00b5bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-194877863-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b58b418b5812479da127b7d697728a98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape350a1ec-a0", "ovs_interfaceid": "e350a1ec-a026-4ac9-80fd-14259052b3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.413371] env[62109]: DEBUG nova.compute.utils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 771.414855] env[62109]: DEBUG nova.compute.manager [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 771.415043] env[62109]: DEBUG nova.network.neutron [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 771.477506] env[62109]: DEBUG nova.policy [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53d26e0515864175963777284ca6d342', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '88ac845ffffe44d9a1127254f08fce91', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 771.490242] env[62109]: DEBUG nova.compute.manager [None req-1e780e11-d9c6-4425-b343-b085962306a1 tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Instance disappeared during snapshot {{(pid=62109) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 771.561194] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.596448] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d3fe85-b90a-ec90-8625-52f2286bd351, 'name': SearchDatastore_Task, 'duration_secs': 0.012312} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.596656] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.596979] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 771.597729] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.597729] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.597729] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 771.599134] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec4561f8-223b-41b3-ab53-cf71c5f6a3e7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.608740] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 771.608955] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 771.610011] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f88617f-17d8-4991-8a94-017bb07ed9cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.617705] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Waiting for the task: (returnval){ [ 771.617705] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d238bf-5afb-0250-d0c6-3c224e5f7b04" [ 771.617705] env[62109]: _type = "Task" [ 771.617705] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.628269] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d238bf-5afb-0250-d0c6-3c224e5f7b04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.647783] env[62109]: DEBUG nova.compute.manager [None req-1e780e11-d9c6-4425-b343-b085962306a1 tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Found 0 images (rotation: 2) {{(pid=62109) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 771.679551] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.679711] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.679867] env[62109]: DEBUG nova.network.neutron [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 771.786929] env[62109]: DEBUG nova.network.neutron [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Successfully created port: de4056dc-a527-43f0-ad81-f82e5cb00f86 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 771.808316] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116338, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.839149] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116340, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.865709] env[62109]: DEBUG oslo_vmware.api [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116341, 'name': PowerOffVM_Task, 'duration_secs': 0.296479} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.866104] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 771.866344] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 771.866638] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77975bc9-f465-4269-9f6b-f08de602d0a1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.891341] env[62109]: DEBUG oslo_concurrency.lockutils [req-1a71a2d3-5e1b-4212-9866-71ec2fd369fc req-5f4904ee-c9dd-4ffe-8557-f145546422fa service nova] Releasing lock "refresh_cache-7f40cdc8-3421-47b7-b148-ff6417105dbb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.893138] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 771.893345] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 771.893530] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Deleting the datastore file [datastore2] dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 771.893786] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e558ee2d-d4c3-4370-a417-2931dc3be665 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
771.903948] env[62109]: DEBUG oslo_vmware.api [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Waiting for the task: (returnval){ [ 771.903948] env[62109]: value = "task-1116343" [ 771.903948] env[62109]: _type = "Task" [ 771.903948] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.920281] env[62109]: DEBUG oslo_vmware.api [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116343, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.921306] env[62109]: DEBUG nova.compute.manager [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 772.131054] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d238bf-5afb-0250-d0c6-3c224e5f7b04, 'name': SearchDatastore_Task, 'duration_secs': 0.017025} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.132241] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c42e24e7-e659-4b1c-9c8e-ba7fe1712ef0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.138820] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Waiting for the task: (returnval){ [ 772.138820] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521a4cd8-8e24-6812-04c5-eaa8357c1ff1" [ 772.138820] env[62109]: _type = "Task" [ 772.138820] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.145541] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521a4cd8-8e24-6812-04c5-eaa8357c1ff1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.301859] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116338, 'name': CloneVM_Task, 'duration_secs': 1.405494} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.301859] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Created linked-clone VM from snapshot [ 772.301859] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a03e1af-6686-439b-a649-699359d9896c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.310968] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Uploading image cf26e32d-8ae4-4b8a-8c9a-067056eb2211 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 772.329735] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e585842-594f-4b4c-a4ad-4c0353fca823 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.334107] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 772.334107] env[62109]: value = "vm-244402" [ 772.334107] env[62109]: _type = "VirtualMachine" [ 772.334107] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 772.336874] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6eccca3b-baa9-4984-825c-e3f2a17ddedf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.345746] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ce421a-b199-4d9d-872b-4b06b7dc998d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.349452] env[62109]: DEBUG oslo_vmware.api [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116340, 'name': PowerOnVM_Task, 'duration_secs': 0.681497} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.350368] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 772.350605] env[62109]: INFO nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Took 9.14 seconds to spawn the instance on the hypervisor. 
[ 772.350789] env[62109]: DEBUG nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 772.351128] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lease: (returnval){ [ 772.351128] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52017819-23dd-9842-4f92-880b1d353e10" [ 772.351128] env[62109]: _type = "HttpNfcLease" [ 772.351128] env[62109]: } obtained for exporting VM: (result){ [ 772.351128] env[62109]: value = "vm-244402" [ 772.351128] env[62109]: _type = "VirtualMachine" [ 772.351128] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 772.351356] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the lease: (returnval){ [ 772.351356] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52017819-23dd-9842-4f92-880b1d353e10" [ 772.351356] env[62109]: _type = "HttpNfcLease" [ 772.351356] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 772.352414] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f660b3-cc05-406d-87d9-4a970299fc76 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.391704] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd91afb-4435-4bea-aab0-af1fb7b24812 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.400843] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 772.400843] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52017819-23dd-9842-4f92-880b1d353e10" [ 772.400843] env[62109]: _type = "HttpNfcLease" [ 772.400843] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 772.401405] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 772.401405] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52017819-23dd-9842-4f92-880b1d353e10" [ 772.401405] env[62109]: _type = "HttpNfcLease" [ 772.401405] env[62109]: }. 
{{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 772.402161] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0978abe-5822-4de4-abae-8915cc113a10 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.410258] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd85cda1-dde6-4738-b59e-6f09c9ff93de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.416802] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d58ef3-13ed-7d97-1987-7af993154b39/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 772.417127] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d58ef3-13ed-7d97-1987-7af993154b39/disk-0.vmdk for reading. {{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 772.420595] env[62109]: DEBUG nova.network.neutron [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 772.441326] env[62109]: DEBUG oslo_vmware.api [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Task: {'id': task-1116343, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149121} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.441929] env[62109]: DEBUG nova.compute.provider_tree [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.501229] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 772.501480] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 772.501754] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 772.501889] env[62109]: INFO nova.compute.manager [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Took 1.17 seconds to destroy the instance on the hypervisor. [ 772.502150] env[62109]: DEBUG oslo.service.loopingcall [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 772.505416] env[62109]: DEBUG nova.scheduler.client.report [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 772.512598] env[62109]: DEBUG nova.compute.manager [-] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 772.512709] env[62109]: DEBUG nova.network.neutron [-] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 772.529909] env[62109]: DEBUG nova.network.neutron [-] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 772.551355] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-29a19137-b9bc-4b00-a865-e3c97f673d95 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.648379] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521a4cd8-8e24-6812-04c5-eaa8357c1ff1, 'name': SearchDatastore_Task, 'duration_secs': 0.026096} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.648674] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.648937] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 7f40cdc8-3421-47b7-b148-ff6417105dbb/7f40cdc8-3421-47b7-b148-ff6417105dbb.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 772.649285] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b95f80d7-409e-4432-a435-5c50b2ed15ed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.656384] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Waiting for the task: (returnval){ [ 772.656384] env[62109]: value = "task-1116345" [ 772.656384] env[62109]: _type = "Task" [ 772.656384] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.666392] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116345, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.733164] env[62109]: DEBUG nova.compute.manager [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Received event network-vif-plugged-7c4891b0-c525-4571-aa3b-47cc9a42d8ac {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 772.733164] env[62109]: DEBUG oslo_concurrency.lockutils [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] Acquiring lock "8b63f9a1-5639-48b2-b0a9-30380835bef2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.733164] env[62109]: DEBUG oslo_concurrency.lockutils [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.733164] env[62109]: DEBUG oslo_concurrency.lockutils [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.733406] env[62109]: DEBUG nova.compute.manager [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] No waiting events found dispatching network-vif-plugged-7c4891b0-c525-4571-aa3b-47cc9a42d8ac {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 772.733675] env[62109]: WARNING nova.compute.manager [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Received unexpected event network-vif-plugged-7c4891b0-c525-4571-aa3b-47cc9a42d8ac for instance with vm_state building and task_state spawning. [ 772.733892] env[62109]: DEBUG nova.compute.manager [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Received event network-changed-7c4891b0-c525-4571-aa3b-47cc9a42d8ac {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 772.734091] env[62109]: DEBUG nova.compute.manager [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Refreshing instance network info cache due to event network-changed-7c4891b0-c525-4571-aa3b-47cc9a42d8ac. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 772.734276] env[62109]: DEBUG oslo_concurrency.lockutils [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] Acquiring lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.743703] env[62109]: DEBUG nova.network.neutron [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance_info_cache with network_info: [{"id": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "address": "fa:16:3e:83:01:bf", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4891b0-c5", "ovs_interfaceid": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.910947] env[62109]: INFO nova.compute.manager [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Took 35.80 seconds to build instance. [ 773.002910] env[62109]: DEBUG nova.compute.manager [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 773.010714] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.109s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.011770] env[62109]: DEBUG nova.compute.manager [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 773.014022] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.695s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.016300] env[62109]: INFO nova.compute.claims [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 773.028129] env[62109]: DEBUG nova.virt.hardware [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:53:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='909148fc-609f-4b07-aa86-4e8ce4ae4b3f',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-757017005',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 773.028491] env[62109]: DEBUG nova.virt.hardware [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 773.028664] env[62109]: DEBUG nova.virt.hardware [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.028922] env[62109]: DEBUG nova.virt.hardware [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 773.029180] env[62109]: DEBUG nova.virt.hardware [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.029356] env[62109]: DEBUG nova.virt.hardware [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 773.029581] env[62109]: DEBUG 
nova.virt.hardware [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 773.029823] env[62109]: DEBUG nova.virt.hardware [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 773.030076] env[62109]: DEBUG nova.virt.hardware [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 773.030319] env[62109]: DEBUG nova.virt.hardware [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 773.030563] env[62109]: DEBUG nova.virt.hardware [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 773.031539] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0704b0-6a66-4725-b4e9-a62dff6a8156 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.035627] env[62109]: DEBUG nova.network.neutron [-] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.045307] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6984510c-5403-4600-b631-c420ca7bdf6d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.169390] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116345, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.249653] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.249949] env[62109]: DEBUG nova.compute.manager [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Instance network_info: |[{"id": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "address": "fa:16:3e:83:01:bf", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4891b0-c5", "ovs_interfaceid": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 773.251178] env[62109]: DEBUG oslo_concurrency.lockutils [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] Acquired lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.251405] env[62109]: DEBUG nova.network.neutron [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Refreshing network info cache for port 7c4891b0-c525-4571-aa3b-47cc9a42d8ac {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 773.254909] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:01:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb224918-e294-4b76-80f9-2fa0031b7dc2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c4891b0-c525-4571-aa3b-47cc9a42d8ac', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 773.263561] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 
tempest-ServerActionsTestOtherB-1141754876-project-member] Creating folder: Project (a363548894df47d5981199004e9884de). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 773.265053] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7952f66-85e6-4388-9709-f5fae5c68939 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.278745] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Created folder: Project (a363548894df47d5981199004e9884de) in parent group-v244329. [ 773.279643] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Creating folder: Instances. Parent ref: group-v244403. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 773.279944] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-753d3ace-f79c-4ae8-82ab-e603ea8e7e61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.294401] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Created folder: Instances in parent group-v244403. [ 773.294401] env[62109]: DEBUG oslo.service.loopingcall [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 773.295188] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 773.295917] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92de9664-7bb1-43bd-8b2c-173eaee954f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.320481] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 773.320481] env[62109]: value = "task-1116348" [ 773.320481] env[62109]: _type = "Task" [ 773.320481] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.329892] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116348, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.384168] env[62109]: DEBUG nova.compute.manager [req-68f95e38-f488-414b-891a-6e173caa1645 req-dff7ae68-6e6e-4065-a811-87abfd7d92ad service nova] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Received event network-vif-plugged-de4056dc-a527-43f0-ad81-f82e5cb00f86 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 773.384168] env[62109]: DEBUG oslo_concurrency.lockutils [req-68f95e38-f488-414b-891a-6e173caa1645 req-dff7ae68-6e6e-4065-a811-87abfd7d92ad service nova] Acquiring lock "32cccd30-278c-48b6-8855-5cd76c2da057-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.384168] env[62109]: DEBUG oslo_concurrency.lockutils [req-68f95e38-f488-414b-891a-6e173caa1645 req-dff7ae68-6e6e-4065-a811-87abfd7d92ad service nova] Lock "32cccd30-278c-48b6-8855-5cd76c2da057-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.384168] env[62109]: DEBUG oslo_concurrency.lockutils [req-68f95e38-f488-414b-891a-6e173caa1645 req-dff7ae68-6e6e-4065-a811-87abfd7d92ad service nova] Lock "32cccd30-278c-48b6-8855-5cd76c2da057-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.384168] env[62109]: DEBUG nova.compute.manager [req-68f95e38-f488-414b-891a-6e173caa1645 req-dff7ae68-6e6e-4065-a811-87abfd7d92ad service nova] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] No waiting events found dispatching network-vif-plugged-de4056dc-a527-43f0-ad81-f82e5cb00f86 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 773.384544] env[62109]: WARNING nova.compute.manager [req-68f95e38-f488-414b-891a-6e173caa1645 req-dff7ae68-6e6e-4065-a811-87abfd7d92ad service nova] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Received unexpected event network-vif-plugged-de4056dc-a527-43f0-ad81-f82e5cb00f86 for instance with vm_state building and task_state spawning. 
[ 773.414461] env[62109]: DEBUG oslo_concurrency.lockutils [None req-80a21651-4d92-4763-b166-7a52bb22025c tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "c44d618e-c781-47ba-b191-cecc01dcfe9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.321s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.486745] env[62109]: DEBUG nova.network.neutron [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Successfully updated port: de4056dc-a527-43f0-ad81-f82e5cb00f86 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 773.521450] env[62109]: DEBUG nova.compute.utils [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 773.523049] env[62109]: DEBUG nova.compute.manager [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 773.523049] env[62109]: DEBUG nova.network.neutron [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 773.540145] env[62109]: INFO nova.compute.manager [-] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Took 1.03 seconds to deallocate network for instance. [ 773.570898] env[62109]: DEBUG nova.policy [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c1e62b99a564244ae23197527e54468', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3fca01fd5c34e6a8fc372bab3e500b0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 773.673704] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116345, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.700712} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.674179] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 7f40cdc8-3421-47b7-b148-ff6417105dbb/7f40cdc8-3421-47b7-b148-ff6417105dbb.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 773.674553] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 773.674925] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-674a65e7-7184-40f1-b398-2af5d2aa8ba8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.683046] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Waiting for the task: (returnval){ [ 773.683046] env[62109]: value = "task-1116349" [ 773.683046] env[62109]: _type = "Task" [ 773.683046] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.686717] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "c90ace77-5b8b-4b04-aa57-d47ad17df01e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.689358] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "c90ace77-5b8b-4b04-aa57-d47ad17df01e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.689358] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "c90ace77-5b8b-4b04-aa57-d47ad17df01e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.689358] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "c90ace77-5b8b-4b04-aa57-d47ad17df01e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.689358] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "c90ace77-5b8b-4b04-aa57-d47ad17df01e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.690142] env[62109]: INFO nova.compute.manager [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Terminating instance [ 773.692777] env[62109]: DEBUG nova.compute.manager [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 773.693256] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 773.694635] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e8f088-0951-4f22-bd61-196191e2c5ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.702148] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116349, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.707281] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 773.708272] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d82065dc-e6da-4404-ab50-4cd7607f767e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.716468] env[62109]: DEBUG oslo_vmware.api [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 773.716468] env[62109]: value = "task-1116350" [ 773.716468] env[62109]: _type = "Task" [ 773.716468] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.727950] env[62109]: DEBUG oslo_vmware.api [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.795801] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "c44d618e-c781-47ba-b191-cecc01dcfe9b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.797559] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "c44d618e-c781-47ba-b191-cecc01dcfe9b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.798017] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "c44d618e-c781-47ba-b191-cecc01dcfe9b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.798415] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "c44d618e-c781-47ba-b191-cecc01dcfe9b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.798796] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "c44d618e-c781-47ba-b191-cecc01dcfe9b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.801410] env[62109]: INFO nova.compute.manager [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Terminating instance [ 773.804749] env[62109]: DEBUG nova.compute.manager [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 773.805097] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 773.806513] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3ce61c-4531-4bc4-bde9-dd7cf22c3513 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.815661] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 773.816209] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c83a0014-606f-4742-91fa-8a2a32b689cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.829764] env[62109]: DEBUG oslo_vmware.api [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 773.829764] env[62109]: value = "task-1116351" [ 773.829764] env[62109]: _type = "Task" [ 773.829764] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.839064] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116348, 'name': CreateVM_Task, 'duration_secs': 0.50966} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.840539] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 773.841981] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.842082] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.842655] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 773.843045] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-843c9d4f-23bc-4a1e-b470-512be1e98eb9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.849020] env[62109]: DEBUG oslo_vmware.api [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116351, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.852780] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 773.852780] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52214a36-6727-116f-1bc3-76702021fd7f" [ 773.852780] env[62109]: _type = "Task" [ 773.852780] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.865369] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52214a36-6727-116f-1bc3-76702021fd7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.919162] env[62109]: DEBUG nova.compute.manager [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 773.975110] env[62109]: DEBUG nova.network.neutron [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Successfully created port: 3cefabfe-3893-464d-ad9b-104d901e71c5 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 773.993704] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.994340] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquired lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.994537] env[62109]: DEBUG nova.network.neutron [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 774.029091] env[62109]: DEBUG nova.compute.manager [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 774.048518] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.196390] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116349, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071877} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.197103] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 774.197989] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958b0308-b366-4af0-8d9d-5b7541cf543c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.221660] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 7f40cdc8-3421-47b7-b148-ff6417105dbb/7f40cdc8-3421-47b7-b148-ff6417105dbb.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 774.227521] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f8dc28a-2c3a-454b-851f-b5c3479547ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.253509] env[62109]: DEBUG oslo_vmware.api [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116350, 'name': PowerOffVM_Task, 'duration_secs': 0.247256} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.258545] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 774.259970] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 774.260481] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Waiting for the task: (returnval){ [ 774.260481] env[62109]: value = "task-1116352" [ 774.260481] env[62109]: _type = "Task" [ 774.260481] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.261146] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-acd84c9f-f965-4ec4-bedd-c9fb3822400e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.271610] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116352, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.343947] env[62109]: DEBUG oslo_vmware.api [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116351, 'name': PowerOffVM_Task, 'duration_secs': 0.235153} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.344385] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 774.344829] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 774.344995] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-445a1b58-92a1-4df3-92c2-5e862e04ce7a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.351318] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 774.351614] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 774.351862] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Deleting the datastore file [datastore2] c90ace77-5b8b-4b04-aa57-d47ad17df01e {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 774.353060] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7abc743c-10d4-4d34-9f23-cb1d5de2a716 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.365499] env[62109]: DEBUG 
oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52214a36-6727-116f-1bc3-76702021fd7f, 'name': SearchDatastore_Task, 'duration_secs': 0.012558} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.370113] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.370531] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 774.370945] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.371247] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.371675] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.372152] env[62109]: DEBUG oslo_vmware.api [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 774.372152] env[62109]: value = "task-1116355" [ 774.372152] env[62109]: _type = "Task" [ 774.372152] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.373178] env[62109]: DEBUG nova.network.neutron [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updated VIF entry in instance network info cache for port 7c4891b0-c525-4571-aa3b-47cc9a42d8ac. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 774.373733] env[62109]: DEBUG nova.network.neutron [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance_info_cache with network_info: [{"id": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "address": "fa:16:3e:83:01:bf", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4891b0-c5", "ovs_interfaceid": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.375216] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-510611fb-41b3-4b9a-bb3a-0822daba73de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.387933] env[62109]: DEBUG oslo_vmware.api [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.396282] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 774.396491] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 774.397269] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ba9ad81-858f-4780-b42c-3aa9896440ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.403751] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 774.403751] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5244e147-bcbc-4bf8-e782-776acde4413f" [ 774.403751] env[62109]: _type = "Task" [ 774.403751] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.412288] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5244e147-bcbc-4bf8-e782-776acde4413f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.423383] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 774.423607] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 774.423902] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Deleting the datastore file [datastore2] c44d618e-c781-47ba-b191-cecc01dcfe9b {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 774.424416] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f74dabc0-af05-448b-8edc-cf7c00313ed6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.436757] env[62109]: DEBUG oslo_vmware.api [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 774.436757] env[62109]: value = "task-1116356" [ 774.436757] env[62109]: _type = "Task" [ 774.436757] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.444585] env[62109]: DEBUG oslo_vmware.api [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116356, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.456215] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.541679] env[62109]: INFO nova.virt.block_device [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Booting with volume 8845b7d9-9e50-489a-94dd-17e0fa9b7a61 at /dev/sda [ 774.550325] env[62109]: DEBUG nova.network.neutron [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 774.591155] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0219143-4524-4546-b8d9-d4d2414621fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.592875] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01a810c6-3307-4d45-9a8c-6c8b78d7af46 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.601468] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af989b32-cc3b-446a-a527-489162a39e7d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.607638] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa9f73b-7ab1-423e-9faf-2c4c748786bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.405761] env[62109]: DEBUG oslo_concurrency.lockutils [req-3b22c3b8-f7af-46a2-844b-38dc1cef317e req-daefdfa9-6087-4ac8-bb6c-97d36d1c1f35 service nova] Releasing lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.429348] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa36a20-19a3-46ce-a1ab-68a62671203c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.434742] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-610170e5-f30b-4327-bec1-cad3de7b2aea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.437057] env[62109]: DEBUG nova.compute.manager [req-a53e6e9c-9c62-4bcc-8420-c6ed4a1f035d req-561bdfbd-04c0-4a89-9493-627546eca4cf service nova] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Received event network-changed-de4056dc-a527-43f0-ad81-f82e5cb00f86 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 775.437332] env[62109]: DEBUG nova.compute.manager 
[req-a53e6e9c-9c62-4bcc-8420-c6ed4a1f035d req-561bdfbd-04c0-4a89-9493-627546eca4cf service nova] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Refreshing instance network info cache due to event network-changed-de4056dc-a527-43f0-ad81-f82e5cb00f86. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 775.437586] env[62109]: DEBUG oslo_concurrency.lockutils [req-a53e6e9c-9c62-4bcc-8420-c6ed4a1f035d req-561bdfbd-04c0-4a89-9493-627546eca4cf service nova] Acquiring lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.446332] env[62109]: DEBUG nova.compute.manager [req-b42001a1-acfd-4882-97e0-48aef5a42194 req-092e6a43-265f-476d-8dda-69ecedcef10a service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Received event network-vif-plugged-3cefabfe-3893-464d-ad9b-104d901e71c5 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 775.446332] env[62109]: DEBUG oslo_concurrency.lockutils [req-b42001a1-acfd-4882-97e0-48aef5a42194 req-092e6a43-265f-476d-8dda-69ecedcef10a service nova] Acquiring lock "a24f2349-7c1b-441d-a36e-b16dd61f6031-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.446332] env[62109]: DEBUG oslo_concurrency.lockutils [req-b42001a1-acfd-4882-97e0-48aef5a42194 req-092e6a43-265f-476d-8dda-69ecedcef10a service nova] Lock "a24f2349-7c1b-441d-a36e-b16dd61f6031-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.446332] env[62109]: DEBUG oslo_concurrency.lockutils [req-b42001a1-acfd-4882-97e0-48aef5a42194 req-092e6a43-265f-476d-8dda-69ecedcef10a service nova] Lock "a24f2349-7c1b-441d-a36e-b16dd61f6031-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.446332] env[62109]: DEBUG nova.compute.manager [req-b42001a1-acfd-4882-97e0-48aef5a42194 req-092e6a43-265f-476d-8dda-69ecedcef10a service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] No waiting events found dispatching network-vif-plugged-3cefabfe-3893-464d-ad9b-104d901e71c5 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 775.446544] env[62109]: WARNING nova.compute.manager [req-b42001a1-acfd-4882-97e0-48aef5a42194 req-092e6a43-265f-476d-8dda-69ecedcef10a service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Received unexpected event network-vif-plugged-3cefabfe-3893-464d-ad9b-104d901e71c5 for instance with vm_state building and task_state block_device_mapping. [ 775.460632] env[62109]: DEBUG oslo_vmware.api [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116356, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162286} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.460883] env[62109]: DEBUG oslo_vmware.api [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190769} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.461093] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5244e147-bcbc-4bf8-e782-776acde4413f, 'name': SearchDatastore_Task, 'duration_secs': 0.013636} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.461290] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116352, 'name': ReconfigVM_Task, 'duration_secs': 0.332895} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.462764] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 775.462947] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 775.463166] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 775.463304] env[62109]: INFO nova.compute.manager [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Took 1.66 seconds to destroy the instance on the hypervisor. [ 775.463541] env[62109]: DEBUG oslo.service.loopingcall [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 775.463726] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 775.463885] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 775.464064] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 775.464224] env[62109]: INFO nova.compute.manager [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Took 1.77 seconds to destroy the instance on the hypervisor. [ 775.464430] env[62109]: DEBUG oslo.service.loopingcall [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 775.465220] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 7f40cdc8-3421-47b7-b148-ff6417105dbb/7f40cdc8-3421-47b7-b148-ff6417105dbb.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 775.467679] env[62109]: DEBUG nova.compute.manager [-] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 775.467776] env[62109]: DEBUG nova.network.neutron [-] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 775.469729] env[62109]: DEBUG nova.compute.manager [-] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 775.469817] env[62109]: DEBUG nova.network.neutron [-] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 775.471227] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26cf982d-7b2c-43b0-9688-60ce7c790559 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.473813] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7c2a40e-93d7-4652-a9ad-e63047346cb2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.477716] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203473b0-924c-4738-a7c1-82382a8f6118 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.491455] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e01c865-eac9-42fd-8cb1-fd296d4a0a7d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.500065] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 775.500065] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523a3059-4670-e12c-0345-ff0705ed1cca" [ 775.500065] env[62109]: _type = "Task" [ 775.500065] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.510446] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Waiting for the task: (returnval){ [ 775.510446] env[62109]: value = "task-1116357" [ 775.510446] env[62109]: _type = "Task" [ 775.510446] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.511274] env[62109]: DEBUG nova.compute.provider_tree [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.528026] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6f421d-8a53-44b0-bcdc-ecb899c1d00a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.532802] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523a3059-4670-e12c-0345-ff0705ed1cca, 'name': SearchDatastore_Task, 'duration_secs': 0.010992} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.533782] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.534118] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 8b63f9a1-5639-48b2-b0a9-30380835bef2/8b63f9a1-5639-48b2-b0a9-30380835bef2.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 775.534394] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a56e89d-71d5-4488-bd27-1fb57a357a69 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.541706] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116357, 'name': Rename_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.542871] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a422cae-3910-486a-97a9-0b4a6000ac31 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.547517] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 775.547517] env[62109]: value = "task-1116358" [ 775.547517] env[62109]: _type = "Task" [ 775.547517] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.555844] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116358, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.559540] env[62109]: DEBUG nova.virt.block_device [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Updating existing volume attachment record: 822db352-6f5e-45d4-aa59-446be3a6f7d8 {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 775.677407] env[62109]: DEBUG nova.network.neutron [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance_info_cache with network_info: [{"id": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "address": "fa:16:3e:f0:07:2b", "network": {"id": "4fcb7814-4de2-430d-af81-55131504c5bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2d2be1e2322b4c87945fff0cd79d3c7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde4056dc-a5", "ovs_interfaceid": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.008279] env[62109]: DEBUG nova.network.neutron [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Successfully updated port: 3cefabfe-3893-464d-ad9b-104d901e71c5 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 776.022698] env[62109]: DEBUG nova.scheduler.client.report [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 776.037716] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116357, 'name': Rename_Task, 'duration_secs': 0.147211} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.038162] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 776.038996] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d91021f9-51c8-431a-80cc-f2159b9380d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.046032] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Waiting for the task: (returnval){ [ 776.046032] env[62109]: value = "task-1116359" [ 776.046032] env[62109]: _type = "Task" [ 776.046032] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.057085] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116359, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.060241] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116358, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497624} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.060499] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 8b63f9a1-5639-48b2-b0a9-30380835bef2/8b63f9a1-5639-48b2-b0a9-30380835bef2.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 776.060711] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 776.061237] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cbab29c3-b600-467b-9721-572acb458c71 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.067323] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 776.067323] env[62109]: value = "task-1116360" [ 776.067323] env[62109]: _type = "Task" [ 776.067323] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.075940] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116360, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.180196] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Releasing lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.180569] env[62109]: DEBUG nova.compute.manager [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Instance network_info: |[{"id": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "address": "fa:16:3e:f0:07:2b", "network": {"id": "4fcb7814-4de2-430d-af81-55131504c5bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2d2be1e2322b4c87945fff0cd79d3c7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde4056dc-a5", "ovs_interfaceid": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 776.180914] env[62109]: DEBUG oslo_concurrency.lockutils [req-a53e6e9c-9c62-4bcc-8420-c6ed4a1f035d req-561bdfbd-04c0-4a89-9493-627546eca4cf service nova] Acquired lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.181107] env[62109]: DEBUG nova.network.neutron [req-a53e6e9c-9c62-4bcc-8420-c6ed4a1f035d req-561bdfbd-04c0-4a89-9493-627546eca4cf service nova] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Refreshing network info cache for port de4056dc-a527-43f0-ad81-f82e5cb00f86 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 776.182568] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:07:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c8dbe25-bca7-4d91-b577-193b8b2aad8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de4056dc-a527-43f0-ad81-f82e5cb00f86', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 776.190306] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Creating folder: Project 
(88ac845ffffe44d9a1127254f08fce91). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 776.191431] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b5442a8-93c7-4ad0-bd29-44f1a3793525 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.203905] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Created folder: Project (88ac845ffffe44d9a1127254f08fce91) in parent group-v244329. [ 776.204202] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Creating folder: Instances. Parent ref: group-v244406. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 776.204487] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c158ff35-a1a4-4180-830a-826b95e7b999 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.217289] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Created folder: Instances in parent group-v244406. [ 776.217630] env[62109]: DEBUG oslo.service.loopingcall [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 776.217860] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 776.218132] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b842065-5525-4f7f-be5c-2af94da182e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.241921] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 776.241921] env[62109]: value = "task-1116363" [ 776.241921] env[62109]: _type = "Task" [ 776.241921] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.251520] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116363, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.451315] env[62109]: DEBUG nova.network.neutron [-] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.474302] env[62109]: DEBUG nova.network.neutron [-] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.511966] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Acquiring lock "refresh_cache-a24f2349-7c1b-441d-a36e-b16dd61f6031" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.512174] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Acquired lock "refresh_cache-a24f2349-7c1b-441d-a36e-b16dd61f6031" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.512509] env[62109]: DEBUG nova.network.neutron [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 776.531990] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.518s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.532677] env[62109]: DEBUG nova.compute.manager [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 776.536030] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.270s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.536030] env[62109]: DEBUG nova.objects.instance [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Lazy-loading 'resources' on Instance uuid 1aaa9eae-9183-49d7-a452-4345ad2a9aa0 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 776.555728] env[62109]: DEBUG oslo_vmware.api [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116359, 'name': PowerOnVM_Task, 'duration_secs': 0.464001} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.556295] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 776.556411] env[62109]: INFO nova.compute.manager [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Took 8.75 seconds to spawn the instance on the hypervisor. [ 776.556513] env[62109]: DEBUG nova.compute.manager [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 776.557361] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364f401d-f729-4199-a25d-889a3edf13e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.577351] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116360, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087697} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.577627] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 776.578460] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4d9101-d40e-409b-9d84-3fe0d8fa5315 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.602641] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 8b63f9a1-5639-48b2-b0a9-30380835bef2/8b63f9a1-5639-48b2-b0a9-30380835bef2.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 776.603066] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1657711f-5c50-4e84-8175-434ef88a02e7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.625867] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 776.625867] env[62109]: value = "task-1116364" [ 776.625867] env[62109]: _type = "Task" [ 776.625867] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.636341] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116364, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.753874] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116363, 'name': CreateVM_Task, 'duration_secs': 0.43671} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.754089] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 776.754839] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.755056] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.755434] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 776.755750] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aab9ed53-2533-4f2c-a1b2-72b87b7b8b21 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.760662] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 776.760662] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ebf6ec-cf7e-29c5-a804-eb00d25d3af3" [ 776.760662] env[62109]: _type = "Task" [ 776.760662] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.770708] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ebf6ec-cf7e-29c5-a804-eb00d25d3af3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.904491] env[62109]: DEBUG nova.network.neutron [req-a53e6e9c-9c62-4bcc-8420-c6ed4a1f035d req-561bdfbd-04c0-4a89-9493-627546eca4cf service nova] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updated VIF entry in instance network info cache for port de4056dc-a527-43f0-ad81-f82e5cb00f86. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 776.904883] env[62109]: DEBUG nova.network.neutron [req-a53e6e9c-9c62-4bcc-8420-c6ed4a1f035d req-561bdfbd-04c0-4a89-9493-627546eca4cf service nova] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance_info_cache with network_info: [{"id": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "address": "fa:16:3e:f0:07:2b", "network": {"id": "4fcb7814-4de2-430d-af81-55131504c5bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2d2be1e2322b4c87945fff0cd79d3c7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde4056dc-a5", "ovs_interfaceid": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.953565] env[62109]: INFO nova.compute.manager [-] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Took 1.48 seconds to deallocate network for instance. [ 776.977670] env[62109]: INFO nova.compute.manager [-] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Took 1.51 seconds to deallocate network for instance. [ 777.042950] env[62109]: DEBUG nova.compute.utils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 777.043605] env[62109]: DEBUG nova.compute.manager [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 777.043864] env[62109]: DEBUG nova.network.neutron [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 777.046855] env[62109]: DEBUG nova.network.neutron [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 777.083630] env[62109]: INFO nova.compute.manager [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Took 33.47 seconds to build instance. [ 777.113189] env[62109]: DEBUG nova.policy [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0bebd67739843f89887b844ea402d72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1c0ab659413e45b1a43747bc7def4daa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 777.139014] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116364, 'name': ReconfigVM_Task, 'duration_secs': 0.387638} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.142203] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 8b63f9a1-5639-48b2-b0a9-30380835bef2/8b63f9a1-5639-48b2-b0a9-30380835bef2.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 777.143243] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a0da954-1715-45f1-91cc-1e4c9cc0c4ac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.151850] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 777.151850] env[62109]: value = "task-1116365" [ 777.151850] env[62109]: _type = "Task" [ 777.151850] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.162904] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116365, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.275511] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ebf6ec-cf7e-29c5-a804-eb00d25d3af3, 'name': SearchDatastore_Task, 'duration_secs': 0.017404} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.275839] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.276116] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 777.277075] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.277075] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.277075] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 777.277239] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf90e63d-d98f-4a8a-a34d-5e78005d4b1c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.287310] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 777.287578] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 777.288358] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20c86915-2ea0-409b-bbd5-108d59a26545 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.295997] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 777.295997] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d18d0a-5684-27cc-9685-c5857a2e5687" [ 777.295997] env[62109]: _type = "Task" [ 777.295997] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.304237] env[62109]: DEBUG nova.network.neutron [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Updating instance_info_cache with network_info: [{"id": "3cefabfe-3893-464d-ad9b-104d901e71c5", "address": "fa:16:3e:66:37:ab", "network": {"id": "57e6dd8f-706a-4cf5-837b-38f98964d675", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-742230249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3fca01fd5c34e6a8fc372bab3e500b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cefabfe-38", "ovs_interfaceid": "3cefabfe-3893-464d-ad9b-104d901e71c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.311240] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d18d0a-5684-27cc-9685-c5857a2e5687, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.409890] env[62109]: DEBUG oslo_concurrency.lockutils [req-a53e6e9c-9c62-4bcc-8420-c6ed4a1f035d req-561bdfbd-04c0-4a89-9493-627546eca4cf service nova] Releasing lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.461320] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.487161] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.492593] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e442a1-4b92-4c44-8673-71803e98dd29 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.503080] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30f0f93-d825-443e-b285-161c432d4803 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.535046] env[62109]: DEBUG nova.network.neutron [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Successfully created port: ef28f215-7e05-46fd-ad13-33c6eab750a4 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 777.539536] env[62109]: DEBUG nova.compute.manager [req-30973419-111f-4103-820e-dde1991f8386 req-335d5907-2b6f-465b-b302-bda448046e9e service nova] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Received event network-vif-deleted-7bfa3d99-6ca4-4382-a04a-d5ad176b7597 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 777.539746] env[62109]: DEBUG nova.compute.manager [req-30973419-111f-4103-820e-dde1991f8386 req-335d5907-2b6f-465b-b302-bda448046e9e service nova] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Received event network-vif-deleted-91a8c9ef-fce5-4834-b0a8-b3a07bf1c769 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 777.539914] env[62109]: DEBUG nova.compute.manager [req-30973419-111f-4103-820e-dde1991f8386 req-335d5907-2b6f-465b-b302-bda448046e9e service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Received event network-changed-3cefabfe-3893-464d-ad9b-104d901e71c5 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 777.540137] env[62109]: DEBUG nova.compute.manager [req-30973419-111f-4103-820e-dde1991f8386 req-335d5907-2b6f-465b-b302-bda448046e9e service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Refreshing instance network info cache due to event 
network-changed-3cefabfe-3893-464d-ad9b-104d901e71c5. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 777.540363] env[62109]: DEBUG oslo_concurrency.lockutils [req-30973419-111f-4103-820e-dde1991f8386 req-335d5907-2b6f-465b-b302-bda448046e9e service nova] Acquiring lock "refresh_cache-a24f2349-7c1b-441d-a36e-b16dd61f6031" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.541703] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36169be5-5b7a-4c9c-9c4b-1684c3eda427 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.548565] env[62109]: DEBUG nova.compute.manager [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 777.555413] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47fdfd8-3bb6-4007-8acd-b9d7f87fb623 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.570516] env[62109]: DEBUG nova.compute.provider_tree [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.586258] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c84fcff0-1f51-42a8-bf14-2aca43c2e383 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Lock "7f40cdc8-3421-47b7-b148-ff6417105dbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 142.154s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.663191] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116365, 'name': Rename_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.673116] env[62109]: DEBUG nova.compute.manager [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 777.673746] env[62109]: DEBUG nova.virt.hardware [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 777.674689] env[62109]: DEBUG nova.virt.hardware [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 777.677046] env[62109]: DEBUG nova.virt.hardware [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 777.677316] env[62109]: DEBUG nova.virt.hardware [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 777.677488] env[62109]: DEBUG nova.virt.hardware [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 777.677648] env[62109]: DEBUG nova.virt.hardware [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 777.677870] env[62109]: DEBUG nova.virt.hardware [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 777.678047] env[62109]: DEBUG nova.virt.hardware [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 777.678224] env[62109]: DEBUG nova.virt.hardware [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] 
Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 777.678409] env[62109]: DEBUG nova.virt.hardware [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 777.678609] env[62109]: DEBUG nova.virt.hardware [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 777.679485] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c98d6d6-06af-4ada-a4c2-6ad822296b2d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.687344] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805fa7f9-2e4f-41bd-abaf-66d7cdb0b269 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.807364] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d18d0a-5684-27cc-9685-c5857a2e5687, 'name': SearchDatastore_Task, 'duration_secs': 0.015333} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.807364] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Releasing lock "refresh_cache-a24f2349-7c1b-441d-a36e-b16dd61f6031" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.807563] env[62109]: DEBUG nova.compute.manager [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Instance network_info: |[{"id": "3cefabfe-3893-464d-ad9b-104d901e71c5", "address": "fa:16:3e:66:37:ab", "network": {"id": "57e6dd8f-706a-4cf5-837b-38f98964d675", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-742230249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3fca01fd5c34e6a8fc372bab3e500b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cefabfe-38", "ovs_interfaceid": "3cefabfe-3893-464d-ad9b-104d901e71c5", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 777.808214] env[62109]: DEBUG oslo_concurrency.lockutils [req-30973419-111f-4103-820e-dde1991f8386 req-335d5907-2b6f-465b-b302-bda448046e9e service nova] Acquired lock "refresh_cache-a24f2349-7c1b-441d-a36e-b16dd61f6031" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.808386] env[62109]: DEBUG nova.network.neutron [req-30973419-111f-4103-820e-dde1991f8386 req-335d5907-2b6f-465b-b302-bda448046e9e service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Refreshing network info cache for port 3cefabfe-3893-464d-ad9b-104d901e71c5 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 777.810161] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:37:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f625f389-b7cf-49b9-998a-87f3a9e3f234', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3cefabfe-3893-464d-ad9b-104d901e71c5', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.823279] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Creating folder: Project (e3fca01fd5c34e6a8fc372bab3e500b0). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 777.823486] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81b7e42b-fca8-4f2d-9ac0-272d9e0933eb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.828113] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-231d3229-651d-4179-946b-28429e38bbbb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.833942] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 777.833942] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521eb506-e6ca-0860-4fc4-48961ba88800" [ 777.833942] env[62109]: _type = "Task" [ 777.833942] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.841880] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521eb506-e6ca-0860-4fc4-48961ba88800, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.843295] env[62109]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 777.843567] env[62109]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62109) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 777.843766] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Folder already exists: Project (e3fca01fd5c34e6a8fc372bab3e500b0). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 777.843954] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Creating folder: Instances. Parent ref: group-v244337. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 777.844233] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-feb9a528-f050-49c0-8d8b-90fc1b706c29 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.854419] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Created folder: Instances in parent group-v244337. [ 777.854662] env[62109]: DEBUG oslo.service.loopingcall [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 777.854856] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 777.855146] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e6b4b23-1273-45c2-bef1-282eff460c2b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.873767] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.873767] env[62109]: value = "task-1116368" [ 777.873767] env[62109]: _type = "Task" [ 777.873767] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.880997] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116368, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.072855] env[62109]: DEBUG nova.scheduler.client.report [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 778.094373] env[62109]: DEBUG nova.compute.manager [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 778.165175] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116365, 'name': Rename_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.344524] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521eb506-e6ca-0860-4fc4-48961ba88800, 'name': SearchDatastore_Task, 'duration_secs': 0.016806} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.344833] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.345120] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 32cccd30-278c-48b6-8855-5cd76c2da057/32cccd30-278c-48b6-8855-5cd76c2da057.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 778.345421] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10054be1-0996-4989-b95a-01d216ad0bb2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.351895] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 778.351895] env[62109]: value = "task-1116369" [ 778.351895] env[62109]: _type = "Task" [ 778.351895] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.360202] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116369, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.383549] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116368, 'name': CreateVM_Task, 'duration_secs': 0.352421} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.383738] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 778.384491] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244340', 'volume_id': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'name': 'volume-8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a24f2349-7c1b-441d-a36e-b16dd61f6031', 'attached_at': '', 'detached_at': '', 'volume_id': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'serial': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61'}, 'device_type': None, 'delete_on_termination': True, 'mount_device': '/dev/sda', 'attachment_id': '822db352-6f5e-45d4-aa59-446be3a6f7d8', 'guest_format': None, 'boot_index': 0, 'volume_type': None}], 'swap': None} {{(pid=62109) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 778.384764] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Root volume attach. Driver type: vmdk {{(pid=62109) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 778.385586] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267139d0-0254-4f67-b11d-66de65719b7a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.394492] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f5c83c-9fe0-4017-a549-5e25d212e8c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.401233] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3531a2fd-c264-4c17-a171-87621292ac44 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.407570] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-18d0eb02-1528-4e62-8eed-f24d2920d23e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.417224] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Waiting for the task: (returnval){ [ 778.417224] env[62109]: value = "task-1116370" [ 778.417224] env[62109]: _type = "Task" [ 778.417224] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.425563] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116370, 'name': RelocateVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.550511] env[62109]: DEBUG nova.network.neutron [req-30973419-111f-4103-820e-dde1991f8386 req-335d5907-2b6f-465b-b302-bda448046e9e service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Updated VIF entry in instance network info cache for port 3cefabfe-3893-464d-ad9b-104d901e71c5. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 778.550917] env[62109]: DEBUG nova.network.neutron [req-30973419-111f-4103-820e-dde1991f8386 req-335d5907-2b6f-465b-b302-bda448046e9e service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Updating instance_info_cache with network_info: [{"id": "3cefabfe-3893-464d-ad9b-104d901e71c5", "address": "fa:16:3e:66:37:ab", "network": {"id": "57e6dd8f-706a-4cf5-837b-38f98964d675", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-742230249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3fca01fd5c34e6a8fc372bab3e500b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cefabfe-38", "ovs_interfaceid": "3cefabfe-3893-464d-ad9b-104d901e71c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.565928] env[62109]: DEBUG nova.compute.manager [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 778.579187] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.043s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.583622] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.384s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.586404] env[62109]: INFO nova.compute.claims [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.601960] env[62109]: DEBUG nova.virt.hardware [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 778.602254] env[62109]: DEBUG nova.virt.hardware [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 778.602435] env[62109]: DEBUG nova.virt.hardware [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 778.602633] env[62109]: DEBUG nova.virt.hardware [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 778.602847] env[62109]: DEBUG nova.virt.hardware [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 778.602947] env[62109]: DEBUG 
nova.virt.hardware [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 778.603378] env[62109]: DEBUG nova.virt.hardware [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 778.603759] env[62109]: DEBUG nova.virt.hardware [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 778.603956] env[62109]: DEBUG nova.virt.hardware [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 778.604112] env[62109]: DEBUG nova.virt.hardware [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 778.604333] env[62109]: DEBUG nova.virt.hardware [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 778.605581] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76268956-aafd-4cef-82f1-345a2ed98634 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.615693] env[62109]: INFO nova.scheduler.client.report [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Deleted allocations for instance 1aaa9eae-9183-49d7-a452-4345ad2a9aa0 [ 778.629499] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5373870-765b-4381-9788-697c236e615d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.637915] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.672746] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116365, 
'name': Rename_Task, 'duration_secs': 1.175442} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.673331] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 778.673756] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a920bd62-8993-4d76-84ad-32818b9120d1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.682941] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 778.682941] env[62109]: value = "task-1116371" [ 778.682941] env[62109]: _type = "Task" [ 778.682941] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.695827] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116371, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.865091] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116369, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.931707] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116370, 'name': RelocateVM_Task, 'duration_secs': 0.486678} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.932058] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Volume attach. 
Driver type: vmdk {{(pid=62109) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 778.932276] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244340', 'volume_id': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'name': 'volume-8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a24f2349-7c1b-441d-a36e-b16dd61f6031', 'attached_at': '', 'detached_at': '', 'volume_id': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'serial': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 778.933260] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76178a0-a17c-4b95-85a1-1f3f844bf75a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.951086] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f8e6ce-e066-44c7-9d3e-c7f3d27104c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.975214] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] volume-8845b7d9-9e50-489a-94dd-17e0fa9b7a61/volume-8845b7d9-9e50-489a-94dd-17e0fa9b7a61.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 778.975552] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01a0071e-2012-4849-89da-d3cb44f507a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.996916] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Waiting for the task: (returnval){ [ 778.996916] env[62109]: value = "task-1116372" [ 778.996916] env[62109]: _type = "Task" [ 778.996916] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.005247] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116372, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.054402] env[62109]: DEBUG oslo_concurrency.lockutils [req-30973419-111f-4103-820e-dde1991f8386 req-335d5907-2b6f-465b-b302-bda448046e9e service nova] Releasing lock "refresh_cache-a24f2349-7c1b-441d-a36e-b16dd61f6031" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.064833] env[62109]: DEBUG nova.network.neutron [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Successfully updated port: ef28f215-7e05-46fd-ad13-33c6eab750a4 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 779.131625] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64c21f9f-84f8-4c8a-a529-79df954cbbcc tempest-ServerAddressesTestJSON-169574328 tempest-ServerAddressesTestJSON-169574328-project-member] Lock "1aaa9eae-9183-49d7-a452-4345ad2a9aa0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.922s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.193868] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116371, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.365888] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116369, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.719777} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.365888] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 32cccd30-278c-48b6-8855-5cd76c2da057/32cccd30-278c-48b6-8855-5cd76c2da057.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 779.365888] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 779.365888] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d55b7f71-9c12-496c-976b-5b78a2768779 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.372674] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 779.372674] env[62109]: value = "task-1116373" [ 779.372674] env[62109]: _type = "Task" [ 779.372674] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.383441] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116373, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.506871] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116372, 'name': ReconfigVM_Task, 'duration_secs': 0.327724} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.507331] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Reconfigured VM instance instance-00000033 to attach disk [datastore2] volume-8845b7d9-9e50-489a-94dd-17e0fa9b7a61/volume-8845b7d9-9e50-489a-94dd-17e0fa9b7a61.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 779.512525] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31fe7d01-fb76-40d3-8005-74b37445171c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.528706] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Waiting for the task: (returnval){ [ 779.528706] env[62109]: value = "task-1116374" [ 779.528706] env[62109]: _type = "Task" [ 779.528706] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.537027] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116374, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.568331] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Acquiring lock "refresh_cache-c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.568755] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Acquired lock "refresh_cache-c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.568755] env[62109]: DEBUG nova.network.neutron [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 779.694600] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116371, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.886849] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116373, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.969103] env[62109]: DEBUG nova.compute.manager [req-2f3c0446-36a6-4b35-bda8-a6ca30bde016 req-d2a8cc2f-6abf-46af-a746-28a425bf32d2 service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Received event network-changed-e350a1ec-a026-4ac9-80fd-14259052b3b2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 779.969192] env[62109]: DEBUG nova.compute.manager [req-2f3c0446-36a6-4b35-bda8-a6ca30bde016 req-d2a8cc2f-6abf-46af-a746-28a425bf32d2 service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Refreshing instance network info cache due to event network-changed-e350a1ec-a026-4ac9-80fd-14259052b3b2. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 779.969434] env[62109]: DEBUG oslo_concurrency.lockutils [req-2f3c0446-36a6-4b35-bda8-a6ca30bde016 req-d2a8cc2f-6abf-46af-a746-28a425bf32d2 service nova] Acquiring lock "refresh_cache-7f40cdc8-3421-47b7-b148-ff6417105dbb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.969586] env[62109]: DEBUG oslo_concurrency.lockutils [req-2f3c0446-36a6-4b35-bda8-a6ca30bde016 req-d2a8cc2f-6abf-46af-a746-28a425bf32d2 service nova] Acquired lock "refresh_cache-7f40cdc8-3421-47b7-b148-ff6417105dbb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.969753] env[62109]: DEBUG nova.network.neutron [req-2f3c0446-36a6-4b35-bda8-a6ca30bde016 req-d2a8cc2f-6abf-46af-a746-28a425bf32d2 service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Refreshing network info cache for port e350a1ec-a026-4ac9-80fd-14259052b3b2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 779.988195] env[62109]: DEBUG nova.compute.manager [req-5044584b-4b42-4836-bb64-d7a673538356 req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Received event network-vif-plugged-ef28f215-7e05-46fd-ad13-33c6eab750a4 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 779.988971] env[62109]: DEBUG oslo_concurrency.lockutils [req-5044584b-4b42-4836-bb64-d7a673538356 req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] Acquiring lock "c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.989292] env[62109]: DEBUG oslo_concurrency.lockutils [req-5044584b-4b42-4836-bb64-d7a673538356 req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] Lock "c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.989516] env[62109]: DEBUG oslo_concurrency.lockutils [req-5044584b-4b42-4836-bb64-d7a673538356 req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] Lock "c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.991937] env[62109]: DEBUG nova.compute.manager [req-5044584b-4b42-4836-bb64-d7a673538356 
req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] No waiting events found dispatching network-vif-plugged-ef28f215-7e05-46fd-ad13-33c6eab750a4 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 779.991937] env[62109]: WARNING nova.compute.manager [req-5044584b-4b42-4836-bb64-d7a673538356 req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Received unexpected event network-vif-plugged-ef28f215-7e05-46fd-ad13-33c6eab750a4 for instance with vm_state building and task_state spawning. [ 779.991937] env[62109]: DEBUG nova.compute.manager [req-5044584b-4b42-4836-bb64-d7a673538356 req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Received event network-changed-ef28f215-7e05-46fd-ad13-33c6eab750a4 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 779.991937] env[62109]: DEBUG nova.compute.manager [req-5044584b-4b42-4836-bb64-d7a673538356 req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Refreshing instance network info cache due to event network-changed-ef28f215-7e05-46fd-ad13-33c6eab750a4. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 779.991937] env[62109]: DEBUG oslo_concurrency.lockutils [req-5044584b-4b42-4836-bb64-d7a673538356 req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] Acquiring lock "refresh_cache-c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.012015] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a59cfc-cac9-40d8-a552-287dbb236179 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.022480] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3e47a2-969a-4e46-8b3d-bd7e2fb0dfc4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.079143] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fc4e1d-5d68-4fa8-ac45-42b07462c5ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.085450] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116374, 'name': ReconfigVM_Task, 'duration_secs': 0.137998} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.086570] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244340', 'volume_id': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'name': 'volume-8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a24f2349-7c1b-441d-a36e-b16dd61f6031', 'attached_at': '', 'detached_at': '', 'volume_id': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'serial': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 780.086936] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d83aaaf-6015-43fb-94b9-3e6aabab185b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.091916] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03fb8e9b-90c8-4ebd-89b9-aed369f11d50 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.097260] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Waiting for the task: (returnval){ [ 780.097260] env[62109]: value = "task-1116375" [ 780.097260] env[62109]: _type = "Task" [ 780.097260] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.108878] env[62109]: DEBUG nova.compute.provider_tree [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 780.116958] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116375, 'name': Rename_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.133739] env[62109]: DEBUG nova.network.neutron [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 780.200933] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116371, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.384045] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116373, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.558446} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.384351] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 780.385132] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13d77ae-1c1a-4ba0-891b-8a2bb1023b0b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.409080] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 32cccd30-278c-48b6-8855-5cd76c2da057/32cccd30-278c-48b6-8855-5cd76c2da057.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 780.409401] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb194b18-0e5b-4e09-9c71-dddde2525e63 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.434054] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 780.434054] env[62109]: value = "task-1116376" [ 780.434054] env[62109]: _type = "Task" [ 780.434054] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.443092] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116376, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.599014] env[62109]: DEBUG nova.network.neutron [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Updating instance_info_cache with network_info: [{"id": "ef28f215-7e05-46fd-ad13-33c6eab750a4", "address": "fa:16:3e:cd:6a:aa", "network": {"id": "18306f28-f024-4f84-982d-f776a3ef0578", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-843522115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c0ab659413e45b1a43747bc7def4daa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef28f215-7e", "ovs_interfaceid": "ef28f215-7e05-46fd-ad13-33c6eab750a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.611681] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116375, 'name': Rename_Task, 'duration_secs': 0.15065} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.612027] env[62109]: DEBUG nova.scheduler.client.report [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 780.615055] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 780.616389] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a614a786-913e-4873-9ea1-7fc0cffbaadb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.623868] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Waiting for the task: (returnval){ [ 780.623868] env[62109]: value = "task-1116377" [ 780.623868] env[62109]: _type = "Task" [ 780.623868] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.636698] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116377, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.705701] env[62109]: DEBUG oslo_vmware.api [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116371, 'name': PowerOnVM_Task, 'duration_secs': 1.552927} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.706322] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 780.706437] env[62109]: INFO nova.compute.manager [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Took 10.33 seconds to spawn the instance on the hypervisor. 
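The power-on sequence above (Rename_Task finishing with 'duration_secs': 0.15065, then PowerOnVM_Task task-1116377 being waited on, and task-1116371 completing with 'duration_secs': 1.552927) is the same invoke/wait/poll cycle that recurs throughout this log: invoke a *_Task method, log "Waiting for the task: (returnval){...} to complete", print "progress is N%" on each poll, and finally report "completed successfully" with a duration. The sketch below is a minimal, generic illustration of that cycle, assuming a caller-supplied get_task_info stand-in for the real vSphere property query; it is not oslo.vmware's or Nova's actual implementation.

import time

def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    """Poll a vSphere-style task until it reaches a terminal state.

    get_task_info is a hypothetical callable standing in for the
    PropertyCollector query the real code performs; it is assumed to
    return an object with state ('queued', 'running', 'success',
    'error'), progress (percent) and error attributes.
    """
    start = time.time()
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            # Mirrors: "Task: {'id': ..., 'duration_secs': N} completed successfully."
            print("Task %s completed in %.3fs" % (task_ref, time.time() - start))
            return info
        if info.state == 'error':
            raise RuntimeError("Task %s failed: %s" % (task_ref, info.error))
        # Mirrors the periodic "... progress is N%." lines in the log above.
        print("Task %s progress is %s%%" % (task_ref, info.progress or 0))
        time.sleep(poll_interval)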
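The inventory snapshot logged above for provider 574e9717-c25e-453d-8028-45d9e2f95398 also implies the capacity the scheduler is working with: Placement-style usable capacity is (total - reserved) * allocation_ratio, with max_unit capping what a single allocation may request. A small worked example using the exact figures from the log (illustrative arithmetic only):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 170},
}
for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print('%s: usable=%g, per-allocation max_unit=%s' % (rc, usable, inv['max_unit']))
# VCPU: usable=192, per-allocation max_unit=16
# MEMORY_MB: usable=196078, per-allocation max_unit=65530
# DISK_GB: usable=400, per-allocation max_unit=170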
[ 780.706741] env[62109]: DEBUG nova.compute.manager [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 780.707948] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a729d1c-19ce-42d2-9d29-3eccf0ba1478 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.926439] env[62109]: DEBUG nova.network.neutron [req-2f3c0446-36a6-4b35-bda8-a6ca30bde016 req-d2a8cc2f-6abf-46af-a746-28a425bf32d2 service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Updated VIF entry in instance network info cache for port e350a1ec-a026-4ac9-80fd-14259052b3b2. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 780.929019] env[62109]: DEBUG nova.network.neutron [req-2f3c0446-36a6-4b35-bda8-a6ca30bde016 req-d2a8cc2f-6abf-46af-a746-28a425bf32d2 service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Updating instance_info_cache with network_info: [{"id": "e350a1ec-a026-4ac9-80fd-14259052b3b2", "address": "fa:16:3e:a7:e1:a4", "network": {"id": "18dd65d1-24ed-4b8e-b0a2-49069a00b5bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-194877863-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b58b418b5812479da127b7d697728a98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape350a1ec-a0", "ovs_interfaceid": "e350a1ec-a026-4ac9-80fd-14259052b3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.947784] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116376, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.030032] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d58ef3-13ed-7d97-1987-7af993154b39/disk-0.vmdk. 
{{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 781.031541] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d112e9d-69cd-43a2-88c7-0e987f5d2fc3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.040487] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d58ef3-13ed-7d97-1987-7af993154b39/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 781.040868] env[62109]: ERROR oslo_vmware.rw_handles [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d58ef3-13ed-7d97-1987-7af993154b39/disk-0.vmdk due to incomplete transfer. [ 781.041271] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c590170d-c253-4760-88a5-b1dc4f944b75 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.050651] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d58ef3-13ed-7d97-1987-7af993154b39/disk-0.vmdk. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 781.051129] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Uploaded image cf26e32d-8ae4-4b8a-8c9a-067056eb2211 to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 781.053674] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 781.054146] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ca841195-5734-4abb-ad26-f9e3046cf5e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.062598] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 781.062598] env[62109]: value = "task-1116378" [ 781.062598] env[62109]: _type = "Task" [ 781.062598] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.071242] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116378, 'name': Destroy_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.105330] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Releasing lock "refresh_cache-c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.105593] env[62109]: DEBUG nova.compute.manager [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Instance network_info: |[{"id": "ef28f215-7e05-46fd-ad13-33c6eab750a4", "address": "fa:16:3e:cd:6a:aa", "network": {"id": "18306f28-f024-4f84-982d-f776a3ef0578", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-843522115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c0ab659413e45b1a43747bc7def4daa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef28f215-7e", "ovs_interfaceid": "ef28f215-7e05-46fd-ad13-33c6eab750a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 781.105920] env[62109]: DEBUG oslo_concurrency.lockutils [req-5044584b-4b42-4836-bb64-d7a673538356 req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] Acquired lock "refresh_cache-c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.106217] env[62109]: DEBUG nova.network.neutron [req-5044584b-4b42-4836-bb64-d7a673538356 req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Refreshing network info cache for port ef28f215-7e05-46fd-ad13-33c6eab750a4 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 781.107614] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:6a:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': 'ef28f215-7e05-46fd-ad13-33c6eab750a4', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 781.116397] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Creating folder: Project (1c0ab659413e45b1a43747bc7def4daa). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 781.117840] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.534s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.117840] env[62109]: DEBUG nova.compute.manager [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 781.120443] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fbce1406-2957-40d8-b078-46b40fac0501 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.123227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.850s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.124967] env[62109]: INFO nova.compute.claims [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 781.144859] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116377, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.145819] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Created folder: Project (1c0ab659413e45b1a43747bc7def4daa) in parent group-v244329. [ 781.146017] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Creating folder: Instances. Parent ref: group-v244411. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 781.146317] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7750eb1-a59e-4f45-be56-d1ad998be2c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.160887] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Created folder: Instances in parent group-v244411. [ 781.161039] env[62109]: DEBUG oslo.service.loopingcall [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 781.161515] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 781.161823] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a19167be-54a7-44fc-8d52-0c18433aff67 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.186887] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 781.186887] env[62109]: value = "task-1116381" [ 781.186887] env[62109]: _type = "Task" [ 781.186887] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.204655] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116381, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.229447] env[62109]: INFO nova.compute.manager [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Took 37.06 seconds to build instance. [ 781.432779] env[62109]: DEBUG oslo_concurrency.lockutils [req-2f3c0446-36a6-4b35-bda8-a6ca30bde016 req-d2a8cc2f-6abf-46af-a746-28a425bf32d2 service nova] Releasing lock "refresh_cache-7f40cdc8-3421-47b7-b148-ff6417105dbb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.448019] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116376, 'name': ReconfigVM_Task, 'duration_secs': 0.779088} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.448019] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 32cccd30-278c-48b6-8855-5cd76c2da057/32cccd30-278c-48b6-8855-5cd76c2da057.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 781.448019] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5432bc78-488b-459e-8ead-4e66d321d734 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.455832] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 781.455832] env[62109]: value = "task-1116382" [ 781.455832] env[62109]: _type = "Task" [ 781.455832] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.465944] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116382, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.573705] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116378, 'name': Destroy_Task} progress is 33%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.626105] env[62109]: DEBUG nova.compute.utils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 781.630643] env[62109]: DEBUG nova.compute.manager [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 781.630847] env[62109]: DEBUG nova.network.neutron [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 781.660717] env[62109]: DEBUG oslo_vmware.api [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116377, 'name': PowerOnVM_Task, 'duration_secs': 0.533443} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.661226] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 781.663894] env[62109]: INFO nova.compute.manager [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Took 3.99 seconds to spawn the instance on the hypervisor. [ 781.663894] env[62109]: DEBUG nova.compute.manager [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 781.663894] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e099e026-f8e4-4d3b-a389-0b41de64c11f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.699755] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116381, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.731671] env[62109]: DEBUG nova.policy [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2988618e18934aa6b85d2ea288917ad3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '275238e3083540aa838de6d5cccf61eb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 781.733417] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1e75d9aa-1d85-452c-a121-5a9de9b672cc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 145.443s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.967206] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116382, 'name': Rename_Task, 'duration_secs': 0.233057} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.967487] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 781.967851] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dccbb281-bb57-4e20-adca-a7720e10fb66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.974586] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 781.974586] env[62109]: value = "task-1116383" [ 781.974586] env[62109]: _type = "Task" [ 781.974586] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.982184] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116383, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.004278] env[62109]: DEBUG nova.network.neutron [req-5044584b-4b42-4836-bb64-d7a673538356 req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Updated VIF entry in instance network info cache for port ef28f215-7e05-46fd-ad13-33c6eab750a4. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 782.004583] env[62109]: DEBUG nova.network.neutron [req-5044584b-4b42-4836-bb64-d7a673538356 req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Updating instance_info_cache with network_info: [{"id": "ef28f215-7e05-46fd-ad13-33c6eab750a4", "address": "fa:16:3e:cd:6a:aa", "network": {"id": "18306f28-f024-4f84-982d-f776a3ef0578", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-843522115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c0ab659413e45b1a43747bc7def4daa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef28f215-7e", "ovs_interfaceid": "ef28f215-7e05-46fd-ad13-33c6eab750a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.075429] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116378, 'name': Destroy_Task, 'duration_secs': 0.659677} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.075896] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Destroyed the VM [ 782.076992] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 782.077359] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0f137714-b352-47e2-9dee-d65b30367458 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.087021] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 782.087021] env[62109]: value = "task-1116384" [ 782.087021] env[62109]: _type = "Task" [ 782.087021] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.094647] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116384, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.133986] env[62109]: DEBUG nova.compute.manager [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 782.196543] env[62109]: INFO nova.compute.manager [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Took 28.60 seconds to build instance. [ 782.206821] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116381, 'name': CreateVM_Task, 'duration_secs': 0.518912} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.206821] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 782.206821] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.206821] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.206821] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 782.206821] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b735e6a-0b98-4aa9-925b-babfd35d7a54 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.213431] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Waiting for the task: (returnval){ [ 782.213431] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52189f7f-f947-e0f6-fbd9-24898cfde0cf" [ 782.213431] env[62109]: _type = "Task" [ 782.213431] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.231897] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52189f7f-f947-e0f6-fbd9-24898cfde0cf, 'name': SearchDatastore_Task, 'duration_secs': 0.013762} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.232411] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.232748] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 782.233056] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.233269] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.233420] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 782.233701] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab055513-871f-4015-8bf0-6fac6295c78a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.240409] env[62109]: DEBUG nova.compute.manager [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 782.251041] env[62109]: DEBUG nova.network.neutron [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Successfully created port: b97f6c74-b63d-475c-93d8-e340e00f169c {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 782.253212] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 782.253459] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 782.254297] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e71665e6-482d-4144-b5e0-46dd65698937 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.262366] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Waiting for the task: (returnval){ [ 782.262366] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52affe3f-68fb-d1a9-4cdb-b6fbeb77526f" [ 782.262366] env[62109]: _type = "Task" [ 782.262366] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.276960] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52affe3f-68fb-d1a9-4cdb-b6fbeb77526f, 'name': SearchDatastore_Task, 'duration_secs': 0.012009} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.277839] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccedff09-ecd2-4ad1-b04b-8e7b5ca7779d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.287701] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Waiting for the task: (returnval){ [ 782.287701] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524a7264-be50-9c37-c336-ec10c4a35e86" [ 782.287701] env[62109]: _type = "Task" [ 782.287701] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.300412] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524a7264-be50-9c37-c336-ec10c4a35e86, 'name': SearchDatastore_Task, 'duration_secs': 0.011672} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.303227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.303613] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4/c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 782.305029] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ffdd6e51-c1f4-4823-bf46-959fd983bac0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.314702] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Waiting for the task: (returnval){ [ 782.314702] env[62109]: value = "task-1116385" [ 782.314702] env[62109]: _type = "Task" [ 782.314702] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.326646] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116385, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.489802] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116383, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.508175] env[62109]: DEBUG oslo_concurrency.lockutils [req-5044584b-4b42-4836-bb64-d7a673538356 req-c7a7fc0a-217b-47de-ba82-119f638af619 service nova] Releasing lock "refresh_cache-c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.597265] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116384, 'name': RemoveSnapshot_Task} progress is 29%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.687622] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2723b544-6daa-4b54-9acf-e6766e98122b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.701406] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f225638a-3305-436c-a46d-7480337790c4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.707717] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9912332a-ffe1-4c26-8a31-4f45dda069a0 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Lock "a24f2349-7c1b-441d-a36e-b16dd61f6031" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 115.652s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.744650] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5e30f7-89f2-4d2a-a3f3-143523cbec1c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.760167] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc71636-9656-4eb4-9dde-b259b0576c52 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.778351] env[62109]: DEBUG nova.compute.provider_tree [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.784464] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.825766] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116385, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.991125] env[62109]: DEBUG oslo_vmware.api [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116383, 'name': PowerOnVM_Task, 'duration_secs': 0.709656} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.991601] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 782.992365] env[62109]: INFO nova.compute.manager [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Took 9.99 seconds to spawn the instance on the hypervisor. [ 782.992659] env[62109]: DEBUG nova.compute.manager [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 782.993541] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877c0f3d-caff-4396-bc12-f1f51e509842 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.099452] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116384, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.147767] env[62109]: DEBUG nova.compute.manager [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 783.178766] env[62109]: DEBUG nova.virt.hardware [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 783.179047] env[62109]: DEBUG nova.virt.hardware [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 783.179218] env[62109]: DEBUG nova.virt.hardware [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 783.179408] env[62109]: DEBUG nova.virt.hardware [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 783.179558] env[62109]: DEBUG nova.virt.hardware [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 783.179710] env[62109]: DEBUG nova.virt.hardware [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 783.179918] env[62109]: DEBUG nova.virt.hardware [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 783.180814] env[62109]: DEBUG nova.virt.hardware [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 783.181063] env[62109]: DEBUG nova.virt.hardware [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] 
Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 783.181249] env[62109]: DEBUG nova.virt.hardware [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 783.181437] env[62109]: DEBUG nova.virt.hardware [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 783.182310] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ffc5b3-ce95-4157-ae89-079c4d296c2c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.192426] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e53c08-921a-42e5-9369-816042a18d4b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.210798] env[62109]: DEBUG nova.compute.manager [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 783.286298] env[62109]: DEBUG nova.scheduler.client.report [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 783.327245] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116385, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526357} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.327612] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4/c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 783.327842] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 783.328155] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e2ecf77-86eb-40e2-bd36-a4da979be368 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.335402] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Waiting for the task: (returnval){ [ 783.335402] env[62109]: value = "task-1116386" [ 783.335402] env[62109]: _type = "Task" [ 783.335402] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.344175] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116386, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.518529] env[62109]: INFO nova.compute.manager [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Took 35.40 seconds to build instance. [ 783.597698] env[62109]: DEBUG oslo_vmware.api [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116384, 'name': RemoveSnapshot_Task, 'duration_secs': 1.033785} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.598020] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 783.598310] env[62109]: INFO nova.compute.manager [None req-f3f05651-e2c8-464b-bc6f-4c81b32bd6e0 tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Took 15.41 seconds to snapshot the instance on the hypervisor. 
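[Editor's note] The records above repeatedly show the same pattern: the VMware driver submits an asynchronous vCenter task (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task, PowerOnVM_Task, RemoveSnapshot_Task) and then oslo.vmware polls it, logging "Waiting for the task ... progress is N% ... completed successfully". The following is a minimal sketch of that submit-and-wait pattern using oslo.vmware's public session API (invoke_api / wait_for_task), not Nova's own code; the connection parameters and the managed-object reference value are placeholders, and the exact constructor arguments are an assumption about the library's signature.

    # Sketch only: drive one vCenter task and block until it completes,
    # mirroring the "Waiting for the task ... completed successfully" log lines.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder host/credentials; constructor argument order is assumed.
    session = vmware_api.VMwareAPISession(
        'vc.example.test',          # vCenter hostname (placeholder)
        'administrator', 'secret',  # credentials (placeholders)
        api_retry_count=10,
        task_poll_interval=0.5)     # seconds between task polls

    # Build a moref for an existing VM; 'vm-12345' is a hypothetical value.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Submit the asynchronous task ...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ... then poll until vCenter reports success; errors raise an exception.
    task_info = session.wait_for_task(task)
    print(task_info.state)

The 'duration_secs' values in the log (e.g. CopyVirtualDisk_Task 0.526s, RemoveSnapshot_Task 1.034s) are simply the elapsed time between task submission and the final successful poll.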
[ 783.741149] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.793226] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.793226] env[62109]: DEBUG nova.compute.manager [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 783.795998] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 19.183s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.796110] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.796348] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 783.796852] env[62109]: DEBUG oslo_concurrency.lockutils [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.294s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.796852] env[62109]: DEBUG nova.objects.instance [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lazy-loading 'resources' on Instance uuid f6d3a50c-bcc3-4a6f-969f-4e629646f427 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 783.802030] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a369425-0491-4d10-b746-20371955a690 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.809932] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b41e63-d026-44cf-840a-5f68fc18550f {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.829500] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4fad9a-5e22-4e0e-acf9-ec70ca2230db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.841968] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97164bc4-61bc-43e9-ab38-e5c7bc843b49 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.852129] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116386, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069845} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.881509] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 783.882613] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181228MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 783.882613] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.883657] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0028cc7-e081-4ca2-9ee0-57fa56ff396f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.910296] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4/c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 783.911445] env[62109]: DEBUG nova.network.neutron [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Successfully updated port: b97f6c74-b63d-475c-93d8-e340e00f169c {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 783.912666] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87121067-7300-4d34-b2af-cea0e188bcb2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.929309] env[62109]: 
DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "refresh_cache-c5c63ece-611d-45d1-a8e6-9327700f1563" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.929546] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "refresh_cache-c5c63ece-611d-45d1-a8e6-9327700f1563" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.929659] env[62109]: DEBUG nova.network.neutron [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 783.938116] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Waiting for the task: (returnval){ [ 783.938116] env[62109]: value = "task-1116387" [ 783.938116] env[62109]: _type = "Task" [ 783.938116] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.950378] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116387, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.021056] env[62109]: DEBUG oslo_concurrency.lockutils [None req-261e9199-a872-49d2-a21e-479f121df335 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "32cccd30-278c-48b6-8855-5cd76c2da057" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 147.450s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.092671] env[62109]: DEBUG nova.compute.manager [req-dcb7bff7-9c51-4e1e-b2db-8039b84283f6 req-6b7682b0-d3ee-4650-9136-1800343aa2a8 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Received event network-changed-7c4891b0-c525-4571-aa3b-47cc9a42d8ac {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 784.092968] env[62109]: DEBUG nova.compute.manager [req-dcb7bff7-9c51-4e1e-b2db-8039b84283f6 req-6b7682b0-d3ee-4650-9136-1800343aa2a8 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Refreshing instance network info cache due to event network-changed-7c4891b0-c525-4571-aa3b-47cc9a42d8ac. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 784.093106] env[62109]: DEBUG oslo_concurrency.lockutils [req-dcb7bff7-9c51-4e1e-b2db-8039b84283f6 req-6b7682b0-d3ee-4650-9136-1800343aa2a8 service nova] Acquiring lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.093255] env[62109]: DEBUG oslo_concurrency.lockutils [req-dcb7bff7-9c51-4e1e-b2db-8039b84283f6 req-6b7682b0-d3ee-4650-9136-1800343aa2a8 service nova] Acquired lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.093420] env[62109]: DEBUG nova.network.neutron [req-dcb7bff7-9c51-4e1e-b2db-8039b84283f6 req-6b7682b0-d3ee-4650-9136-1800343aa2a8 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Refreshing network info cache for port 7c4891b0-c525-4571-aa3b-47cc9a42d8ac {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 784.300353] env[62109]: DEBUG nova.compute.utils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 784.301808] env[62109]: DEBUG nova.compute.manager [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 784.305121] env[62109]: DEBUG nova.network.neutron [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 784.357384] env[62109]: DEBUG nova.compute.manager [req-1db58cbf-7dc7-4828-b972-986e1491e875 req-c63d08fe-4122-43ed-a299-f4c1f36628a2 service nova] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Received event network-vif-plugged-b97f6c74-b63d-475c-93d8-e340e00f169c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 784.358023] env[62109]: DEBUG oslo_concurrency.lockutils [req-1db58cbf-7dc7-4828-b972-986e1491e875 req-c63d08fe-4122-43ed-a299-f4c1f36628a2 service nova] Acquiring lock "c5c63ece-611d-45d1-a8e6-9327700f1563-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.358023] env[62109]: DEBUG oslo_concurrency.lockutils [req-1db58cbf-7dc7-4828-b972-986e1491e875 req-c63d08fe-4122-43ed-a299-f4c1f36628a2 service nova] Lock "c5c63ece-611d-45d1-a8e6-9327700f1563-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.358177] env[62109]: DEBUG oslo_concurrency.lockutils [req-1db58cbf-7dc7-4828-b972-986e1491e875 req-c63d08fe-4122-43ed-a299-f4c1f36628a2 service nova] Lock "c5c63ece-611d-45d1-a8e6-9327700f1563-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.359258] env[62109]: DEBUG nova.compute.manager [req-1db58cbf-7dc7-4828-b972-986e1491e875 req-c63d08fe-4122-43ed-a299-f4c1f36628a2 service nova] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] No waiting events found dispatching network-vif-plugged-b97f6c74-b63d-475c-93d8-e340e00f169c {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 784.359258] env[62109]: WARNING nova.compute.manager [req-1db58cbf-7dc7-4828-b972-986e1491e875 req-c63d08fe-4122-43ed-a299-f4c1f36628a2 service nova] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Received unexpected event network-vif-plugged-b97f6c74-b63d-475c-93d8-e340e00f169c for instance with vm_state building and task_state spawning. [ 784.405646] env[62109]: DEBUG nova.policy [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb6623b629754bbbae2d613013e08ec2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '172388e3349b45c2aa63422749513204', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 784.422582] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "1399f618-3a93-4731-a59b-f98306d6cd52" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.422812] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.453858] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.493372] env[62109]: DEBUG nova.network.neutron [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 784.523787] env[62109]: DEBUG nova.compute.manager [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 784.730624] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2a34ff-4bee-41b7-b098-018b363bdf35 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.740318] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e258dab3-fb56-45e1-9195-38146d1ba136 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.775421] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0bc8d8c-a6c3-4480-b483-c4dc995bb02e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.784787] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451f7720-eb81-4ec9-8844-fd50c350cfac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.798972] env[62109]: DEBUG nova.compute.provider_tree [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.806569] env[62109]: DEBUG nova.compute.manager [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 784.830401] env[62109]: DEBUG nova.network.neutron [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Updating instance_info_cache with network_info: [{"id": "b97f6c74-b63d-475c-93d8-e340e00f169c", "address": "fa:16:3e:5d:e6:cd", "network": {"id": "9b805542-3ae8-423b-9b1d-70116ea546bb", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1529116057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275238e3083540aa838de6d5cccf61eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb97f6c74-b6", "ovs_interfaceid": "b97f6c74-b63d-475c-93d8-e340e00f169c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.927238] env[62109]: DEBUG nova.compute.utils [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 784.949784] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116387, 'name': ReconfigVM_Task, 'duration_secs': 0.615324} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.950101] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Reconfigured VM instance instance-00000034 to attach disk [datastore2] c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4/c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 784.950735] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c98d0c90-0cac-4456-b175-6c99e198508e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.958087] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Waiting for the task: (returnval){ [ 784.958087] env[62109]: value = "task-1116388" [ 784.958087] env[62109]: _type = "Task" [ 784.958087] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.967913] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116388, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.042957] env[62109]: DEBUG nova.network.neutron [req-dcb7bff7-9c51-4e1e-b2db-8039b84283f6 req-6b7682b0-d3ee-4650-9136-1800343aa2a8 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updated VIF entry in instance network info cache for port 7c4891b0-c525-4571-aa3b-47cc9a42d8ac. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 785.042957] env[62109]: DEBUG nova.network.neutron [req-dcb7bff7-9c51-4e1e-b2db-8039b84283f6 req-6b7682b0-d3ee-4650-9136-1800343aa2a8 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance_info_cache with network_info: [{"id": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "address": "fa:16:3e:83:01:bf", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4891b0-c5", "ovs_interfaceid": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.043838] env[62109]: DEBUG nova.network.neutron [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Successfully created port: 8c141056-fbc3-4508-a389-9a9ed6566325 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.049832] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.302085] env[62109]: DEBUG nova.scheduler.client.report [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 785.338343] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "refresh_cache-c5c63ece-611d-45d1-a8e6-9327700f1563" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.338730] env[62109]: DEBUG nova.compute.manager [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Instance network_info: |[{"id": "b97f6c74-b63d-475c-93d8-e340e00f169c", "address": "fa:16:3e:5d:e6:cd", "network": {"id": "9b805542-3ae8-423b-9b1d-70116ea546bb", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1529116057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275238e3083540aa838de6d5cccf61eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb97f6c74-b6", "ovs_interfaceid": "b97f6c74-b63d-475c-93d8-e340e00f169c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 785.339250] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:e6:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b97f6c74-b63d-475c-93d8-e340e00f169c', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 785.348675] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Creating folder: Project (275238e3083540aa838de6d5cccf61eb). Parent ref: group-v244329. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 785.349394] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09db6693-2859-4304-96c5-6a0c33245d34 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.363139] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Created folder: Project (275238e3083540aa838de6d5cccf61eb) in parent group-v244329. [ 785.363384] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Creating folder: Instances. Parent ref: group-v244414. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 785.363734] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72ce9dc4-b698-4475-9605-72924a619413 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.375359] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Created folder: Instances in parent group-v244414. [ 785.375670] env[62109]: DEBUG oslo.service.loopingcall [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 785.375900] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 785.376224] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5c0518b-ca7b-41bb-a9eb-9a2ac7dd2d4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.398544] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 785.398544] env[62109]: value = "task-1116391" [ 785.398544] env[62109]: _type = "Task" [ 785.398544] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.406355] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116391, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.430209] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.468877] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116388, 'name': Rename_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.544383] env[62109]: DEBUG oslo_concurrency.lockutils [req-dcb7bff7-9c51-4e1e-b2db-8039b84283f6 req-6b7682b0-d3ee-4650-9136-1800343aa2a8 service nova] Releasing lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.809362] env[62109]: DEBUG oslo_concurrency.lockutils [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.013s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.811781] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.297s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.812027] env[62109]: DEBUG nova.objects.instance [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Lazy-loading 'resources' on Instance uuid 8584eb2c-57a3-455e-9d3c-877286e23ccc {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 785.816233] env[62109]: DEBUG nova.compute.manager [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 785.837656] env[62109]: INFO nova.scheduler.client.report [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Deleted allocations for instance f6d3a50c-bcc3-4a6f-969f-4e629646f427 [ 785.847536] env[62109]: DEBUG nova.virt.hardware [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 785.847801] env[62109]: DEBUG nova.virt.hardware [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 785.847965] env[62109]: DEBUG nova.virt.hardware [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 785.848483] env[62109]: DEBUG nova.virt.hardware [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 785.848483] env[62109]: DEBUG nova.virt.hardware [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 785.848483] env[62109]: DEBUG nova.virt.hardware [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 785.848654] env[62109]: DEBUG nova.virt.hardware [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 785.848816] env[62109]: DEBUG nova.virt.hardware [None 
req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 785.848989] env[62109]: DEBUG nova.virt.hardware [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 785.849185] env[62109]: DEBUG nova.virt.hardware [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 785.849348] env[62109]: DEBUG nova.virt.hardware [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 785.850221] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c830af49-10a2-4631-b59d-e95fa9548328 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.861674] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3679ebd5-5959-47e7-9693-26cffbd348a6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.908874] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116391, 'name': CreateVM_Task, 'duration_secs': 0.439774} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.909056] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 785.910248] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.910423] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.910770] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 785.913311] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdcb0d11-e048-4d40-b393-0c55aaf44a2b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.918706] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 785.918706] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521b800b-1fae-d164-08d0-0cd389e0a860" [ 785.918706] env[62109]: _type = "Task" [ 785.918706] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.927861] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521b800b-1fae-d164-08d0-0cd389e0a860, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.971357] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116388, 'name': Rename_Task, 'duration_secs': 0.709188} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.971656] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 785.971904] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-277ae8d2-f78e-467e-974d-2c2ba319fdf2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.980925] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Waiting for the task: (returnval){ [ 785.980925] env[62109]: value = "task-1116392" [ 785.980925] env[62109]: _type = "Task" [ 785.980925] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.989668] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116392, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.313796] env[62109]: DEBUG nova.compute.manager [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Received event network-changed-e350a1ec-a026-4ac9-80fd-14259052b3b2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 786.313979] env[62109]: DEBUG nova.compute.manager [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Refreshing instance network info cache due to event network-changed-e350a1ec-a026-4ac9-80fd-14259052b3b2. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 786.314245] env[62109]: DEBUG oslo_concurrency.lockutils [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] Acquiring lock "refresh_cache-7f40cdc8-3421-47b7-b148-ff6417105dbb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.314384] env[62109]: DEBUG oslo_concurrency.lockutils [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] Acquired lock "refresh_cache-7f40cdc8-3421-47b7-b148-ff6417105dbb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.314630] env[62109]: DEBUG nova.network.neutron [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Refreshing network info cache for port e350a1ec-a026-4ac9-80fd-14259052b3b2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 786.349144] env[62109]: DEBUG oslo_concurrency.lockutils [None req-46f2e078-7e27-4f79-94b2-f2d5e86895ca tempest-ImagesOneServerTestJSON-401427409 tempest-ImagesOneServerTestJSON-401427409-project-member] Lock "f6d3a50c-bcc3-4a6f-969f-4e629646f427" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.418032] env[62109]: DEBUG nova.compute.manager [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 786.420451] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b122cf58-5bec-40c3-807d-5fd6612869a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.425956] env[62109]: DEBUG nova.compute.manager [req-75dbd941-e220-4a1c-a729-54fad84f416f req-269328d7-39bc-4613-9dc0-b6881a2ae072 service nova] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Received event network-changed-b97f6c74-b63d-475c-93d8-e340e00f169c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 786.426131] env[62109]: DEBUG nova.compute.manager [req-75dbd941-e220-4a1c-a729-54fad84f416f req-269328d7-39bc-4613-9dc0-b6881a2ae072 service nova] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Refreshing instance network info cache due to event network-changed-b97f6c74-b63d-475c-93d8-e340e00f169c. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 786.426590] env[62109]: DEBUG oslo_concurrency.lockutils [req-75dbd941-e220-4a1c-a729-54fad84f416f req-269328d7-39bc-4613-9dc0-b6881a2ae072 service nova] Acquiring lock "refresh_cache-c5c63ece-611d-45d1-a8e6-9327700f1563" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.426590] env[62109]: DEBUG oslo_concurrency.lockutils [req-75dbd941-e220-4a1c-a729-54fad84f416f req-269328d7-39bc-4613-9dc0-b6881a2ae072 service nova] Acquired lock "refresh_cache-c5c63ece-611d-45d1-a8e6-9327700f1563" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.426854] env[62109]: DEBUG nova.network.neutron [req-75dbd941-e220-4a1c-a729-54fad84f416f req-269328d7-39bc-4613-9dc0-b6881a2ae072 service nova] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Refreshing network info cache for port b97f6c74-b63d-475c-93d8-e340e00f169c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 786.447554] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521b800b-1fae-d164-08d0-0cd389e0a860, 'name': SearchDatastore_Task, 'duration_secs': 0.023957} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.451045] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.451351] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.452748] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.452799] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.453141] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 786.453886] env[62109]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bc40f8b-30d3-4fce-a8cd-5e8cefd1c3bc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.470533] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 786.470533] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 786.473099] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2738d146-0cda-4293-bd1e-8e11a5a54282 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.482592] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 786.482592] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d0827a-95b2-3dc2-9a0e-83057e9c7529" [ 786.482592] env[62109]: _type = "Task" [ 786.482592] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.499466] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "1399f618-3a93-4731-a59b-f98306d6cd52" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.499781] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.500081] env[62109]: INFO nova.compute.manager [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Attaching volume fbb52df2-cecd-40ba-89ef-7b4d6f79e515 to /dev/sdb [ 786.502037] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116392, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.505536] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d0827a-95b2-3dc2-9a0e-83057e9c7529, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.563566] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89969b1c-2f49-4baa-9a91-6d604f454f53 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.969020] env[62109]: DEBUG nova.compute.manager [req-ed923deb-ad46-406a-8540-1037a15026c2 req-1681dce1-d8c4-48f9-af5a-695de3bafc21 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Received event network-vif-plugged-8c141056-fbc3-4508-a389-9a9ed6566325 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 786.969020] env[62109]: DEBUG oslo_concurrency.lockutils [req-ed923deb-ad46-406a-8540-1037a15026c2 req-1681dce1-d8c4-48f9-af5a-695de3bafc21 service nova] Acquiring lock "9b2968bb-ed06-4740-b43e-b4aa1fac76dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.969020] env[62109]: DEBUG oslo_concurrency.lockutils [req-ed923deb-ad46-406a-8540-1037a15026c2 req-1681dce1-d8c4-48f9-af5a-695de3bafc21 service nova] Lock "9b2968bb-ed06-4740-b43e-b4aa1fac76dd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.969020] env[62109]: DEBUG oslo_concurrency.lockutils [req-ed923deb-ad46-406a-8540-1037a15026c2 req-1681dce1-d8c4-48f9-af5a-695de3bafc21 service nova] Lock "9b2968bb-ed06-4740-b43e-b4aa1fac76dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.969020] env[62109]: DEBUG nova.compute.manager [req-ed923deb-ad46-406a-8540-1037a15026c2 req-1681dce1-d8c4-48f9-af5a-695de3bafc21 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] No waiting events found dispatching network-vif-plugged-8c141056-fbc3-4508-a389-9a9ed6566325 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 786.970210] env[62109]: WARNING nova.compute.manager [req-ed923deb-ad46-406a-8540-1037a15026c2 req-1681dce1-d8c4-48f9-af5a-695de3bafc21 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Received unexpected event network-vif-plugged-8c141056-fbc3-4508-a389-9a9ed6566325 for instance with vm_state building and task_state spawning. 
[ 786.970210] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e32f7ef-8448-416b-bc5f-03fc4acc8ac8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.970210] env[62109]: DEBUG nova.virt.block_device [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Updating existing volume attachment record: e47f28dc-f45f-422b-9cb2-5102ac0b52f4 {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 786.970210] env[62109]: INFO nova.compute.manager [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] instance snapshotting [ 786.970210] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10293cf-92a8-4a2c-bdaf-c46fc9a3e7fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.016914] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd437da3-7133-476e-a597-a0984aec5010 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.042022] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Acquiring lock "7f40cdc8-3421-47b7-b148-ff6417105dbb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.042410] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Lock "7f40cdc8-3421-47b7-b148-ff6417105dbb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.042526] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Acquiring lock "7f40cdc8-3421-47b7-b148-ff6417105dbb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.042789] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Lock "7f40cdc8-3421-47b7-b148-ff6417105dbb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.042848] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Lock "7f40cdc8-3421-47b7-b148-ff6417105dbb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.044804] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d0827a-95b2-3dc2-9a0e-83057e9c7529, 'name': SearchDatastore_Task, 'duration_secs': 0.021872} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.047198] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116392, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.048306] env[62109]: INFO nova.compute.manager [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Terminating instance [ 787.053359] env[62109]: DEBUG nova.compute.manager [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 787.053359] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 787.054550] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d4ad968-7ad4-4498-9cd0-c9b86d16cfeb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.057048] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9949ad40-6d4d-4222-8977-6902e1b858bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.071370] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 787.071370] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520abec2-debb-69ba-4cde-48af649fba3d" [ 787.071370] env[62109]: _type = "Task" [ 787.071370] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.071723] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 787.074767] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee5d02d8-32e0-49d6-8391-e4a2dbd70695 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.086730] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520abec2-debb-69ba-4cde-48af649fba3d, 'name': SearchDatastore_Task, 'duration_secs': 0.010942} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.088239] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.088614] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] c5c63ece-611d-45d1-a8e6-9327700f1563/c5c63ece-611d-45d1-a8e6-9327700f1563.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 787.089103] env[62109]: DEBUG oslo_vmware.api [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Waiting for the task: (returnval){ [ 787.089103] env[62109]: value = "task-1116393" [ 787.089103] env[62109]: _type = "Task" [ 787.089103] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.089316] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6217fba-068a-49c2-af0d-2ccb17841781 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.101491] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 787.101491] env[62109]: value = "task-1116395" [ 787.101491] env[62109]: _type = "Task" [ 787.101491] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.105535] env[62109]: DEBUG oslo_vmware.api [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116393, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.115659] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116395, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.171784] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949b0d2c-9f48-466d-8ad5-d1f0f2792f65 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.180701] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25950d01-8159-4ace-bb79-87ade5a77b9b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.215694] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13eead46-313e-40dc-af18-1fdd09279617 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.225576] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de558831-0d89-4d21-9611-33e522309ec6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.243052] env[62109]: DEBUG nova.compute.provider_tree [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.359520] env[62109]: DEBUG nova.network.neutron [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Updated VIF entry in instance network info cache for port e350a1ec-a026-4ac9-80fd-14259052b3b2. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 787.360136] env[62109]: DEBUG nova.network.neutron [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Updating instance_info_cache with network_info: [{"id": "e350a1ec-a026-4ac9-80fd-14259052b3b2", "address": "fa:16:3e:a7:e1:a4", "network": {"id": "18dd65d1-24ed-4b8e-b0a2-49069a00b5bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-194877863-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b58b418b5812479da127b7d697728a98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape350a1ec-a0", "ovs_interfaceid": "e350a1ec-a026-4ac9-80fd-14259052b3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.381866] env[62109]: DEBUG nova.network.neutron [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Successfully updated port: 8c141056-fbc3-4508-a389-9a9ed6566325 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 787.521241] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116392, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.550324] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 787.550848] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2c42f28b-afb9-435c-a540-b45b26757abd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.564064] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 787.564064] env[62109]: value = "task-1116398" [ 787.564064] env[62109]: _type = "Task" [ 787.564064] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.579394] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116398, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.617063] env[62109]: DEBUG oslo_vmware.api [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116393, 'name': PowerOffVM_Task, 'duration_secs': 0.212348} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.617658] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 787.618028] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 787.618840] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea1b0a6b-ca25-4519-a1ef-0b1ff74dda1d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.625623] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116395, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.699024] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 787.699024] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 787.699024] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Deleting the datastore file [datastore1] 7f40cdc8-3421-47b7-b148-ff6417105dbb {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 787.699024] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b15fa00f-105e-41a9-bbb3-ec9b70db587d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.709802] env[62109]: DEBUG oslo_vmware.api [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Waiting for the task: (returnval){ [ 787.709802] env[62109]: value = "task-1116400" [ 787.709802] env[62109]: _type = "Task" [ 787.709802] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.718166] env[62109]: DEBUG oslo_vmware.api [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116400, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.723574] env[62109]: DEBUG nova.network.neutron [req-75dbd941-e220-4a1c-a729-54fad84f416f req-269328d7-39bc-4613-9dc0-b6881a2ae072 service nova] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Updated VIF entry in instance network info cache for port b97f6c74-b63d-475c-93d8-e340e00f169c. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 787.724626] env[62109]: DEBUG nova.network.neutron [req-75dbd941-e220-4a1c-a729-54fad84f416f req-269328d7-39bc-4613-9dc0-b6881a2ae072 service nova] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Updating instance_info_cache with network_info: [{"id": "b97f6c74-b63d-475c-93d8-e340e00f169c", "address": "fa:16:3e:5d:e6:cd", "network": {"id": "9b805542-3ae8-423b-9b1d-70116ea546bb", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1529116057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275238e3083540aa838de6d5cccf61eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb97f6c74-b6", "ovs_interfaceid": "b97f6c74-b63d-475c-93d8-e340e00f169c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.745841] env[62109]: DEBUG nova.scheduler.client.report [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 787.865023] env[62109]: DEBUG oslo_concurrency.lockutils [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] Releasing lock "refresh_cache-7f40cdc8-3421-47b7-b148-ff6417105dbb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.865023] env[62109]: DEBUG nova.compute.manager [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Received event network-changed-3cefabfe-3893-464d-ad9b-104d901e71c5 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 787.865023] env[62109]: DEBUG nova.compute.manager [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Refreshing instance network info cache due to event network-changed-3cefabfe-3893-464d-ad9b-104d901e71c5. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 787.865023] env[62109]: DEBUG oslo_concurrency.lockutils [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] Acquiring lock "refresh_cache-a24f2349-7c1b-441d-a36e-b16dd61f6031" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.865023] env[62109]: DEBUG oslo_concurrency.lockutils [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] Acquired lock "refresh_cache-a24f2349-7c1b-441d-a36e-b16dd61f6031" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.865474] env[62109]: DEBUG nova.network.neutron [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Refreshing network info cache for port 3cefabfe-3893-464d-ad9b-104d901e71c5 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 787.885613] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Acquiring lock "refresh_cache-9b2968bb-ed06-4740-b43e-b4aa1fac76dd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.885613] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Acquired lock "refresh_cache-9b2968bb-ed06-4740-b43e-b4aa1fac76dd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.885613] env[62109]: DEBUG nova.network.neutron [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 788.019594] env[62109]: DEBUG oslo_vmware.api [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116392, 'name': PowerOnVM_Task, 'duration_secs': 1.761837} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.020156] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 788.020384] env[62109]: INFO nova.compute.manager [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Took 9.45 seconds to spawn the instance on the hypervisor. 
[ 788.020574] env[62109]: DEBUG nova.compute.manager [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 788.021424] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f508a88b-c568-4146-acdb-717abc25ac81 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.075116] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116398, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.117673] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116395, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526516} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.117970] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] c5c63ece-611d-45d1-a8e6-9327700f1563/c5c63ece-611d-45d1-a8e6-9327700f1563.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 788.118211] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 788.118474] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-212f08cc-6f62-4cf7-9965-298cec772e08 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.127513] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 788.127513] env[62109]: value = "task-1116401" [ 788.127513] env[62109]: _type = "Task" [ 788.127513] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.141379] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116401, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.218872] env[62109]: DEBUG oslo_vmware.api [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Task: {'id': task-1116400, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152316} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.219201] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 788.219371] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 788.219557] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 788.219738] env[62109]: INFO nova.compute.manager [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Took 1.17 seconds to destroy the instance on the hypervisor. [ 788.219995] env[62109]: DEBUG oslo.service.loopingcall [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 788.220209] env[62109]: DEBUG nova.compute.manager [-] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 788.220304] env[62109]: DEBUG nova.network.neutron [-] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 788.227044] env[62109]: DEBUG oslo_concurrency.lockutils [req-75dbd941-e220-4a1c-a729-54fad84f416f req-269328d7-39bc-4613-9dc0-b6881a2ae072 service nova] Releasing lock "refresh_cache-c5c63ece-611d-45d1-a8e6-9327700f1563" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.256878] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.442s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.262565] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.659s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.264713] env[62109]: INFO nova.compute.claims [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 788.287733] env[62109]: INFO nova.scheduler.client.report [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Deleted allocations for instance 8584eb2c-57a3-455e-9d3c-877286e23ccc [ 788.465381] env[62109]: DEBUG nova.network.neutron [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 788.493159] env[62109]: DEBUG nova.compute.manager [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Stashing vm_state: active {{(pid=62109) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 788.545454] env[62109]: INFO nova.compute.manager [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Took 34.32 seconds to build instance. 
[ 788.545454] env[62109]: DEBUG nova.compute.manager [req-e334045e-b572-43b7-bf5a-95d10019865c req-ddf1e34a-6569-45dc-a1f1-ffe5986098a6 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Received event network-changed-8c141056-fbc3-4508-a389-9a9ed6566325 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 788.545454] env[62109]: DEBUG nova.compute.manager [req-e334045e-b572-43b7-bf5a-95d10019865c req-ddf1e34a-6569-45dc-a1f1-ffe5986098a6 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Refreshing instance network info cache due to event network-changed-8c141056-fbc3-4508-a389-9a9ed6566325. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 788.545454] env[62109]: DEBUG oslo_concurrency.lockutils [req-e334045e-b572-43b7-bf5a-95d10019865c req-ddf1e34a-6569-45dc-a1f1-ffe5986098a6 service nova] Acquiring lock "refresh_cache-9b2968bb-ed06-4740-b43e-b4aa1fac76dd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.583434] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116398, 'name': CreateSnapshot_Task, 'duration_secs': 0.529371} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.585377] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 788.585377] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe5de15-9883-4546-8f9e-177914eba021 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.639905] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116401, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074527} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.640353] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.641245] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004baba2-0135-41e2-a6e2-704648339d66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.666620] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] c5c63ece-611d-45d1-a8e6-9327700f1563/c5c63ece-611d-45d1-a8e6-9327700f1563.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.671960] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc844ca1-1efb-4145-a2f0-fd1248d46dda {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.702843] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 788.702843] env[62109]: value = "task-1116402" [ 788.702843] env[62109]: _type = "Task" [ 788.702843] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.717942] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116402, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.798157] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d109dad5-b0ee-4b63-968c-fb72df5b6d21 tempest-ServerMetadataNegativeTestJSON-914971530 tempest-ServerMetadataNegativeTestJSON-914971530-project-member] Lock "8584eb2c-57a3-455e-9d3c-877286e23ccc" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 22.422s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.889234] env[62109]: DEBUG nova.network.neutron [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Updating instance_info_cache with network_info: [{"id": "8c141056-fbc3-4508-a389-9a9ed6566325", "address": "fa:16:3e:3b:fb:62", "network": {"id": "f2e32a40-4ae4-4c71-901c-ff0d4e5fa042", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1356570714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "172388e3349b45c2aa63422749513204", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c141056-fb", "ovs_interfaceid": "8c141056-fbc3-4508-a389-9a9ed6566325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.960513] env[62109]: DEBUG nova.network.neutron [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Updated VIF entry in instance network info cache for port 3cefabfe-3893-464d-ad9b-104d901e71c5. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 788.960909] env[62109]: DEBUG nova.network.neutron [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Updating instance_info_cache with network_info: [{"id": "3cefabfe-3893-464d-ad9b-104d901e71c5", "address": "fa:16:3e:66:37:ab", "network": {"id": "57e6dd8f-706a-4cf5-837b-38f98964d675", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-742230249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3fca01fd5c34e6a8fc372bab3e500b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cefabfe-38", "ovs_interfaceid": "3cefabfe-3893-464d-ad9b-104d901e71c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.021330] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.043302] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a456044b-e5da-427d-902d-020b8e8c3c14 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Lock "c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 113.164s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.108357] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 789.108605] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-99ccfde4-89c1-4fca-a7be-b34184622aae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.119889] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 789.119889] env[62109]: value = "task-1116403" [ 789.119889] env[62109]: _type = "Task" [ 
789.119889] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.129819] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116403, 'name': CloneVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.217465] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116402, 'name': ReconfigVM_Task, 'duration_secs': 0.40407} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.218051] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Reconfigured VM instance instance-00000035 to attach disk [datastore2] c5c63ece-611d-45d1-a8e6-9327700f1563/c5c63ece-611d-45d1-a8e6-9327700f1563.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.221210] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97b23d57-bc98-4cbe-935e-69841b7d5e7a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.232219] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 789.232219] env[62109]: value = "task-1116404" [ 789.232219] env[62109]: _type = "Task" [ 789.232219] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.240563] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116404, 'name': Rename_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.369129] env[62109]: DEBUG nova.network.neutron [-] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.395019] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Releasing lock "refresh_cache-9b2968bb-ed06-4740-b43e-b4aa1fac76dd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.395019] env[62109]: DEBUG nova.compute.manager [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Instance network_info: |[{"id": "8c141056-fbc3-4508-a389-9a9ed6566325", "address": "fa:16:3e:3b:fb:62", "network": {"id": "f2e32a40-4ae4-4c71-901c-ff0d4e5fa042", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1356570714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "172388e3349b45c2aa63422749513204", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c141056-fb", "ovs_interfaceid": "8c141056-fbc3-4508-a389-9a9ed6566325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 789.396135] env[62109]: DEBUG oslo_concurrency.lockutils [req-e334045e-b572-43b7-bf5a-95d10019865c req-ddf1e34a-6569-45dc-a1f1-ffe5986098a6 service nova] Acquired lock "refresh_cache-9b2968bb-ed06-4740-b43e-b4aa1fac76dd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.396450] env[62109]: DEBUG nova.network.neutron [req-e334045e-b572-43b7-bf5a-95d10019865c req-ddf1e34a-6569-45dc-a1f1-ffe5986098a6 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Refreshing network info cache for port 8c141056-fbc3-4508-a389-9a9ed6566325 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 789.399025] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:fb:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eed34ae1-5f7f-4deb-9db8-85eaa1e60c29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c141056-fbc3-4508-a389-9a9ed6566325', 'vif_model': 'vmxnet3'}] {{(pid=62109) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 789.411785] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Creating folder: Project (172388e3349b45c2aa63422749513204). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 789.411785] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1edb5943-6f32-4a31-8ad7-3d5eee103d02 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.423108] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Created folder: Project (172388e3349b45c2aa63422749513204) in parent group-v244329. [ 789.423447] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Creating folder: Instances. Parent ref: group-v244421. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 789.427396] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e4260cd-2f38-43b0-966a-cad0b0d1f4ed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.440171] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Created folder: Instances in parent group-v244421. [ 789.440500] env[62109]: DEBUG oslo.service.loopingcall [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 789.440727] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 789.440951] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9788aaf3-936a-410d-bb17-fda171995722 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.464244] env[62109]: DEBUG oslo_concurrency.lockutils [req-55247a2d-a6f2-4a3a-9301-bbf5ac4cd712 req-91c7a659-09a9-4e46-9552-b92f5371150a service nova] Releasing lock "refresh_cache-a24f2349-7c1b-441d-a36e-b16dd61f6031" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.466343] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 789.466343] env[62109]: value = "task-1116408" [ 789.466343] env[62109]: _type = "Task" [ 789.466343] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.475979] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116408, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.638859] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116403, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.731360] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7ea5dc-cc20-47c4-83fb-150ba8182a18 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.747572] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082a0be4-3ba7-4265-9d52-28727819ebd4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.751545] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116404, 'name': Rename_Task, 'duration_secs': 0.358991} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.751730] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 789.752418] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-873263d8-95a4-4e4d-bb7d-b7cb850e71c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.782994] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da7ac63-ce81-43b4-a5a3-169b15d59ad5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.785865] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 789.785865] env[62109]: value = "task-1116409" [ 789.785865] env[62109]: _type = "Task" [ 789.785865] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.793929] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e84366-91d6-4698-ad0c-a738f7226908 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.801740] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116409, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.813467] env[62109]: DEBUG nova.compute.provider_tree [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.871387] env[62109]: INFO nova.compute.manager [-] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Took 1.65 seconds to deallocate network for instance. [ 789.979842] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116408, 'name': CreateVM_Task, 'duration_secs': 0.476689} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.979958] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 789.980710] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.980893] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.981483] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 789.981742] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4733dda1-449c-484a-a415-0ea76e66d95d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.988234] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Waiting for the task: (returnval){ [ 789.988234] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5290e8e5-8ba9-a9bb-6815-f20ac6dfa520" [ 789.988234] env[62109]: _type = "Task" [ 789.988234] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.000850] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5290e8e5-8ba9-a9bb-6815-f20ac6dfa520, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.135668] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116403, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.250807] env[62109]: DEBUG nova.network.neutron [req-e334045e-b572-43b7-bf5a-95d10019865c req-ddf1e34a-6569-45dc-a1f1-ffe5986098a6 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Updated VIF entry in instance network info cache for port 8c141056-fbc3-4508-a389-9a9ed6566325. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 790.251468] env[62109]: DEBUG nova.network.neutron [req-e334045e-b572-43b7-bf5a-95d10019865c req-ddf1e34a-6569-45dc-a1f1-ffe5986098a6 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Updating instance_info_cache with network_info: [{"id": "8c141056-fbc3-4508-a389-9a9ed6566325", "address": "fa:16:3e:3b:fb:62", "network": {"id": "f2e32a40-4ae4-4c71-901c-ff0d4e5fa042", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1356570714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "172388e3349b45c2aa63422749513204", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c141056-fb", "ovs_interfaceid": "8c141056-fbc3-4508-a389-9a9ed6566325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.300630] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116409, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.317648] env[62109]: DEBUG nova.scheduler.client.report [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 790.379408] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.453468] env[62109]: DEBUG nova.compute.manager [req-f578c0c8-109d-45ed-bcb1-85b462e7d455 req-fdcc05b7-50dd-41ab-8455-61548bffaed2 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Received event network-changed-ef28f215-7e05-46fd-ad13-33c6eab750a4 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 790.453468] env[62109]: DEBUG nova.compute.manager [req-f578c0c8-109d-45ed-bcb1-85b462e7d455 req-fdcc05b7-50dd-41ab-8455-61548bffaed2 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Refreshing instance network info cache due to event network-changed-ef28f215-7e05-46fd-ad13-33c6eab750a4. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 790.453468] env[62109]: DEBUG oslo_concurrency.lockutils [req-f578c0c8-109d-45ed-bcb1-85b462e7d455 req-fdcc05b7-50dd-41ab-8455-61548bffaed2 service nova] Acquiring lock "refresh_cache-c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.453468] env[62109]: DEBUG oslo_concurrency.lockutils [req-f578c0c8-109d-45ed-bcb1-85b462e7d455 req-fdcc05b7-50dd-41ab-8455-61548bffaed2 service nova] Acquired lock "refresh_cache-c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.453468] env[62109]: DEBUG nova.network.neutron [req-f578c0c8-109d-45ed-bcb1-85b462e7d455 req-fdcc05b7-50dd-41ab-8455-61548bffaed2 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Refreshing network info cache for port ef28f215-7e05-46fd-ad13-33c6eab750a4 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 790.505059] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5290e8e5-8ba9-a9bb-6815-f20ac6dfa520, 'name': SearchDatastore_Task, 'duration_secs': 0.010902} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.505291] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.505519] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 790.505762] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.505918] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.506137] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 790.506538] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-069b163b-0678-49ab-af7f-9a298af91d03 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.522745] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 790.522745] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 790.523544] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4bcf036-dded-4380-a21f-032d59eed006 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.532297] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Waiting for the task: (returnval){ [ 790.532297] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fe1cd6-52da-ec87-061d-bcdfa6c60bc1" [ 790.532297] env[62109]: _type = "Task" [ 790.532297] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.550787] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fe1cd6-52da-ec87-061d-bcdfa6c60bc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.635808] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116403, 'name': CloneVM_Task, 'duration_secs': 1.357972} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.636624] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Created linked-clone VM from snapshot [ 790.637895] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20237a3e-b163-436b-ae62-cffa2a01b40f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.649165] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Uploading image e68eba09-0579-4426-a954-9275e3a6d751 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 790.683650] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 790.683650] env[62109]: value = "vm-244420" [ 790.683650] env[62109]: _type = "VirtualMachine" [ 790.683650] env[62109]: }. 
{{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 790.684458] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-50e10a6a-ee96-486b-b701-ddfab7235d5c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.695081] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lease: (returnval){ [ 790.695081] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522bf2fa-6381-5252-410b-f8c6e12b84be" [ 790.695081] env[62109]: _type = "HttpNfcLease" [ 790.695081] env[62109]: } obtained for exporting VM: (result){ [ 790.695081] env[62109]: value = "vm-244420" [ 790.695081] env[62109]: _type = "VirtualMachine" [ 790.695081] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 790.695081] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the lease: (returnval){ [ 790.695081] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522bf2fa-6381-5252-410b-f8c6e12b84be" [ 790.695081] env[62109]: _type = "HttpNfcLease" [ 790.695081] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 790.710945] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 790.710945] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522bf2fa-6381-5252-410b-f8c6e12b84be" [ 790.710945] env[62109]: _type = "HttpNfcLease" [ 790.710945] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 790.715023] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 790.715023] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522bf2fa-6381-5252-410b-f8c6e12b84be" [ 790.715023] env[62109]: _type = "HttpNfcLease" [ 790.715023] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 790.715023] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66076a50-23ca-476f-b92b-343addbf9665 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.728879] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527bbb12-6c1e-822a-9bb5-390068c1ea70/disk-0.vmdk from lease info. 
{{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 790.729195] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527bbb12-6c1e-822a-9bb5-390068c1ea70/disk-0.vmdk for reading. {{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 790.826417] env[62109]: DEBUG oslo_concurrency.lockutils [req-e334045e-b572-43b7-bf5a-95d10019865c req-ddf1e34a-6569-45dc-a1f1-ffe5986098a6 service nova] Releasing lock "refresh_cache-9b2968bb-ed06-4740-b43e-b4aa1fac76dd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.827701] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.566s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.828395] env[62109]: DEBUG nova.compute.manager [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 790.841316] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.280s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.844229] env[62109]: INFO nova.compute.claims [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 790.852037] env[62109]: DEBUG nova.compute.manager [req-3bc15ed4-b907-425c-97d7-5f420bfb9230 req-b3daaf58-a4c9-4938-9c78-d8c1358d9884 service nova] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Received event network-vif-deleted-e350a1ec-a026-4ac9-80fd-14259052b3b2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 790.861033] env[62109]: DEBUG oslo_vmware.api [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116409, 'name': PowerOnVM_Task, 'duration_secs': 0.652925} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.861452] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 790.861860] env[62109]: INFO nova.compute.manager [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Took 7.71 seconds to spawn the instance on the hypervisor. [ 790.862144] env[62109]: DEBUG nova.compute.manager [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 790.863394] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c97a05-b208-433d-a952-4c6e966ee3fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.974608] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-679fc7db-055b-421d-bfa6-7d32f29d8391 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.054518] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fe1cd6-52da-ec87-061d-bcdfa6c60bc1, 'name': SearchDatastore_Task, 'duration_secs': 0.012055} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.056038] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a605637e-932b-405f-9ebb-93e890f58df2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.065435] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Waiting for the task: (returnval){ [ 791.065435] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5263bdac-0653-19aa-eba5-cea5ab199bf8" [ 791.065435] env[62109]: _type = "Task" [ 791.065435] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.078366] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5263bdac-0653-19aa-eba5-cea5ab199bf8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.353279] env[62109]: DEBUG nova.compute.utils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 791.355174] env[62109]: DEBUG nova.compute.manager [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 791.355458] env[62109]: DEBUG nova.network.neutron [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 791.384608] env[62109]: INFO nova.compute.manager [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Took 34.20 seconds to build instance. [ 791.581365] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5263bdac-0653-19aa-eba5-cea5ab199bf8, 'name': SearchDatastore_Task, 'duration_secs': 0.038877} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.582017] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.582017] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 9b2968bb-ed06-4740-b43e-b4aa1fac76dd/9b2968bb-ed06-4740-b43e-b4aa1fac76dd.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 791.582253] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c2b3f54-495f-4d5a-82ff-7f91791f83f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.590921] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Waiting for the task: (returnval){ [ 791.590921] env[62109]: value = "task-1116411" [ 791.590921] env[62109]: _type = "Task" [ 791.590921] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.602113] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116411, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.636034] env[62109]: DEBUG nova.policy [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d8b4a13b12d477ebd973d90ec11f62d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f94fd7a82dc0489597534c518365971b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 791.711042] env[62109]: DEBUG nova.network.neutron [req-f578c0c8-109d-45ed-bcb1-85b462e7d455 req-fdcc05b7-50dd-41ab-8455-61548bffaed2 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Updated VIF entry in instance network info cache for port ef28f215-7e05-46fd-ad13-33c6eab750a4. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 791.711439] env[62109]: DEBUG nova.network.neutron [req-f578c0c8-109d-45ed-bcb1-85b462e7d455 req-fdcc05b7-50dd-41ab-8455-61548bffaed2 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Updating instance_info_cache with network_info: [{"id": "ef28f215-7e05-46fd-ad13-33c6eab750a4", "address": "fa:16:3e:cd:6a:aa", "network": {"id": "18306f28-f024-4f84-982d-f776a3ef0578", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-843522115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c0ab659413e45b1a43747bc7def4daa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef28f215-7e", "ovs_interfaceid": "ef28f215-7e05-46fd-ad13-33c6eab750a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.860936] env[62109]: DEBUG nova.compute.manager [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 791.889687] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d983147-ce6c-4906-a3cc-4ba13194d043 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "c5c63ece-611d-45d1-a8e6-9327700f1563" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.814s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.990747] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Volume attach. Driver type: vmdk {{(pid=62109) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 791.991345] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244418', 'volume_id': 'fbb52df2-cecd-40ba-89ef-7b4d6f79e515', 'name': 'volume-fbb52df2-cecd-40ba-89ef-7b4d6f79e515', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1399f618-3a93-4731-a59b-f98306d6cd52', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbb52df2-cecd-40ba-89ef-7b4d6f79e515', 'serial': 'fbb52df2-cecd-40ba-89ef-7b4d6f79e515'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 791.994587] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e9949a-7a16-4da6-836c-82cd5fbc8463 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.033071] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b94040f-e2f4-42ec-951b-e799eddc8dbf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.083465] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] volume-fbb52df2-cecd-40ba-89ef-7b4d6f79e515/volume-fbb52df2-cecd-40ba-89ef-7b4d6f79e515.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 792.087808] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdf24c9f-ad82-443b-b6eb-5b8d986fef62 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.128463] env[62109]: DEBUG oslo_vmware.api [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 792.128463] env[62109]: value = "task-1116412" [ 792.128463] env[62109]: _type = "Task" [ 792.128463] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.139540] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116411, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.169484] env[62109]: DEBUG oslo_vmware.api [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116412, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.222355] env[62109]: DEBUG oslo_concurrency.lockutils [req-f578c0c8-109d-45ed-bcb1-85b462e7d455 req-fdcc05b7-50dd-41ab-8455-61548bffaed2 service nova] Releasing lock "refresh_cache-c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.271743] env[62109]: DEBUG nova.network.neutron [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Successfully created port: e13a10c2-836a-412a-b1af-974b816d3971 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 792.554661] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42ebea2-e625-47eb-b057-aa1d2284342a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.563775] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8716589-0480-4e03-8134-ec8c4edc3732 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.600180] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d8a335-12df-44df-a983-d508ee817689 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.612350] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e018a7-b768-4f8a-a5c1-c853735bd4ca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.631257] env[62109]: DEBUG nova.compute.provider_tree [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.635955] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116411, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.690332} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.636584] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 9b2968bb-ed06-4740-b43e-b4aa1fac76dd/9b2968bb-ed06-4740-b43e-b4aa1fac76dd.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 792.636886] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 792.637213] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e634ff3b-c9c1-4d4b-a751-ffd90aea98d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.653648] env[62109]: DEBUG oslo_vmware.api [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116412, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.654722] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Waiting for the task: (returnval){ [ 792.654722] env[62109]: value = "task-1116413" [ 792.654722] env[62109]: _type = "Task" [ 792.654722] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.664399] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116413, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.877919] env[62109]: DEBUG nova.compute.manager [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 792.909780] env[62109]: DEBUG nova.virt.hardware [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 792.910040] env[62109]: DEBUG nova.virt.hardware [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 792.911321] env[62109]: DEBUG nova.virt.hardware [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.911681] env[62109]: DEBUG nova.virt.hardware [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 792.911948] env[62109]: DEBUG nova.virt.hardware [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.912494] env[62109]: DEBUG nova.virt.hardware [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 792.912775] env[62109]: DEBUG nova.virt.hardware [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 792.913045] env[62109]: DEBUG nova.virt.hardware [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 792.913251] env[62109]: DEBUG nova.virt.hardware [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 
tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 792.913436] env[62109]: DEBUG nova.virt.hardware [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 792.913604] env[62109]: DEBUG nova.virt.hardware [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 792.914867] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ec973c-38fc-408e-b442-57e62c355df9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.924481] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6149b4a8-cd19-467b-87d0-80a8ebe72531 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.107617] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Acquiring lock "8b6ec904-8c68-4eaa-94fe-47a87528e26b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.107965] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Lock "8b6ec904-8c68-4eaa-94fe-47a87528e26b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.108133] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Acquiring lock "8b6ec904-8c68-4eaa-94fe-47a87528e26b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.108321] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Lock "8b6ec904-8c68-4eaa-94fe-47a87528e26b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.108498] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Lock "8b6ec904-8c68-4eaa-94fe-47a87528e26b-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.111280] env[62109]: INFO nova.compute.manager [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Terminating instance [ 793.113745] env[62109]: DEBUG nova.compute.manager [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 793.113953] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 793.114912] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ab6a97-3a9c-42fb-8952-9f91b1fdb0b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.126214] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 793.127069] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-474720e9-3139-4cb9-913c-0f9ffe9e2eac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.134817] env[62109]: DEBUG oslo_vmware.api [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Waiting for the task: (returnval){ [ 793.134817] env[62109]: value = "task-1116414" [ 793.134817] env[62109]: _type = "Task" [ 793.134817] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.140449] env[62109]: DEBUG nova.scheduler.client.report [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 793.159118] env[62109]: DEBUG oslo_vmware.api [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116414, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.160245] env[62109]: DEBUG oslo_vmware.api [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116412, 'name': ReconfigVM_Task, 'duration_secs': 0.819942} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.165010] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Reconfigured VM instance instance-00000026 to attach disk [datastore2] volume-fbb52df2-cecd-40ba-89ef-7b4d6f79e515/volume-fbb52df2-cecd-40ba-89ef-7b4d6f79e515.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 793.172078] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80dd0e04-38bb-47b0-b910-e9d530a36bed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.185896] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f170f0-3dc6-488a-96a4-c6be0b8a9440 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.195060] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116413, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.179803} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.197027] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 793.197440] env[62109]: DEBUG oslo_vmware.api [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 793.197440] env[62109]: value = "task-1116415" [ 793.197440] env[62109]: _type = "Task" [ 793.197440] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.200095] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28d1c00-3b6b-4f83-87bb-4e866eb8b574 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.208847] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-be4731c6-504b-48d4-824e-46f46ec72d73 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Suspending the VM {{(pid=62109) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 793.209210] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-8bba21c4-5bdd-4391-a38a-bd8eacebcc51 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.220138] env[62109]: DEBUG oslo_vmware.api [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116415, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.242542] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 9b2968bb-ed06-4740-b43e-b4aa1fac76dd/9b2968bb-ed06-4740-b43e-b4aa1fac76dd.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 793.242960] env[62109]: DEBUG oslo_vmware.api [None req-be4731c6-504b-48d4-824e-46f46ec72d73 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 793.242960] env[62109]: value = "task-1116416" [ 793.242960] env[62109]: _type = "Task" [ 793.242960] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.243203] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db67075a-d6c1-4450-a46b-de60fc2ab40d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.275064] env[62109]: DEBUG oslo_vmware.api [None req-be4731c6-504b-48d4-824e-46f46ec72d73 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116416, 'name': SuspendVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.275064] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Waiting for the task: (returnval){ [ 793.275064] env[62109]: value = "task-1116417" [ 793.275064] env[62109]: _type = "Task" [ 793.275064] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.285688] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116417, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.552244] env[62109]: DEBUG oslo_concurrency.lockutils [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "028300fd-f9f8-4606-a39e-53582f830eeb" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.552244] env[62109]: DEBUG oslo_concurrency.lockutils [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "028300fd-f9f8-4606-a39e-53582f830eeb" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.647701] env[62109]: DEBUG oslo_vmware.api [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116414, 'name': PowerOffVM_Task, 'duration_secs': 0.444288} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.648219] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 793.648611] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 793.649676] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.809s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.650297] env[62109]: DEBUG nova.compute.manager [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 793.657685] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb33cb91-7590-49fb-808b-d646717eb738 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.659994] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.612s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.660487] env[62109]: DEBUG nova.objects.instance [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lazy-loading 'resources' on Instance uuid dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 793.715801] env[62109]: DEBUG oslo_vmware.api [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116415, 'name': ReconfigVM_Task, 'duration_secs': 0.229774} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.716357] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244418', 'volume_id': 'fbb52df2-cecd-40ba-89ef-7b4d6f79e515', 'name': 'volume-fbb52df2-cecd-40ba-89ef-7b4d6f79e515', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1399f618-3a93-4731-a59b-f98306d6cd52', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbb52df2-cecd-40ba-89ef-7b4d6f79e515', 'serial': 'fbb52df2-cecd-40ba-89ef-7b4d6f79e515'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 793.749416] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 793.749865] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 793.750107] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Deleting the datastore file [datastore2] 8b6ec904-8c68-4eaa-94fe-47a87528e26b {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 793.750400] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-7de84db1-3239-433b-9864-86294026c2d1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.760585] env[62109]: DEBUG oslo_vmware.api [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Waiting for the task: (returnval){ [ 793.760585] env[62109]: value = "task-1116419" [ 793.760585] env[62109]: _type = "Task" [ 793.760585] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.773403] env[62109]: DEBUG oslo_vmware.api [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116419, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.777578] env[62109]: DEBUG oslo_vmware.api [None req-be4731c6-504b-48d4-824e-46f46ec72d73 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116416, 'name': SuspendVM_Task} progress is 62%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.788018] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116417, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.057258] env[62109]: DEBUG nova.compute.utils [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 794.164237] env[62109]: DEBUG nova.compute.utils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 794.164237] env[62109]: DEBUG nova.compute.manager [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 794.164712] env[62109]: DEBUG nova.network.neutron [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 794.192819] env[62109]: DEBUG nova.network.neutron [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Successfully updated port: e13a10c2-836a-412a-b1af-974b816d3971 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 794.228033] env[62109]: DEBUG nova.policy [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491fd4e791924dafb155dd356bf20aa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6ee24c114bd495e8f29eeda1f6b8bba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 794.275985] env[62109]: DEBUG oslo_vmware.api [None req-be4731c6-504b-48d4-824e-46f46ec72d73 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116416, 'name': SuspendVM_Task, 'duration_secs': 0.758986} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.277558] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-be4731c6-504b-48d4-824e-46f46ec72d73 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Suspended the VM {{(pid=62109) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 794.277708] env[62109]: DEBUG nova.compute.manager [None req-be4731c6-504b-48d4-824e-46f46ec72d73 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 794.278739] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a35d9e-297e-434b-8583-87e702aa3f90 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.295945] env[62109]: DEBUG oslo_vmware.api [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Task: {'id': task-1116419, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196805} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.301413] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 794.301598] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 794.301784] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 794.302500] env[62109]: INFO nova.compute.manager [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Took 1.19 seconds to destroy the instance on the hypervisor. [ 794.302500] env[62109]: DEBUG oslo.service.loopingcall [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 794.305520] env[62109]: DEBUG nova.compute.manager [-] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 794.305520] env[62109]: DEBUG nova.network.neutron [-] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 794.311782] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116417, 'name': ReconfigVM_Task, 'duration_secs': 0.637609} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.315575] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 9b2968bb-ed06-4740-b43e-b4aa1fac76dd/9b2968bb-ed06-4740-b43e-b4aa1fac76dd.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 794.316454] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cbff4259-a89c-4bba-b67e-cdb6a65bd7e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.326455] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Waiting for the task: (returnval){ [ 794.326455] env[62109]: value = "task-1116420" [ 794.326455] env[62109]: _type = "Task" [ 794.326455] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.345574] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116420, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.560621] env[62109]: DEBUG oslo_concurrency.lockutils [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "028300fd-f9f8-4606-a39e-53582f830eeb" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.619661] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33be9d21-7899-4e62-8db9-c640158d370e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.631292] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75df39c6-ddf6-4f1d-ba91-a45cc340ef75 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.680450] env[62109]: DEBUG nova.compute.manager [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 794.688033] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22a65e7-3071-409a-aa5d-491e9b28a303 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.696261] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "refresh_cache-59f6adc7-d491-4a86-83f7-89128511e00f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.696495] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "refresh_cache-59f6adc7-d491-4a86-83f7-89128511e00f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.700045] env[62109]: DEBUG nova.network.neutron [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 794.700045] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33be3db5-2869-49e2-9ac3-64bbe2af9a4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.717560] env[62109]: DEBUG nova.compute.provider_tree [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 794.761520] env[62109]: DEBUG nova.objects.instance [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'flavor' on Instance uuid 1399f618-3a93-4731-a59b-f98306d6cd52 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 794.827019] env[62109]: DEBUG nova.network.neutron [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Successfully created port: a11a7ca2-7088-4194-a63f-e4a9ed75ecc0 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 794.843523] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116420, 'name': Rename_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.216700] env[62109]: DEBUG nova.compute.manager [req-89b31008-6a95-4917-b0a4-f94a6f352e68 req-48b47e09-208f-4a84-a4ee-bda2c838c1cf service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Received event network-vif-plugged-e13a10c2-836a-412a-b1af-974b816d3971 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 795.216982] env[62109]: DEBUG oslo_concurrency.lockutils [req-89b31008-6a95-4917-b0a4-f94a6f352e68 req-48b47e09-208f-4a84-a4ee-bda2c838c1cf service nova] Acquiring lock "59f6adc7-d491-4a86-83f7-89128511e00f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.217215] env[62109]: DEBUG oslo_concurrency.lockutils [req-89b31008-6a95-4917-b0a4-f94a6f352e68 req-48b47e09-208f-4a84-a4ee-bda2c838c1cf service nova] Lock "59f6adc7-d491-4a86-83f7-89128511e00f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.217444] env[62109]: DEBUG oslo_concurrency.lockutils [req-89b31008-6a95-4917-b0a4-f94a6f352e68 req-48b47e09-208f-4a84-a4ee-bda2c838c1cf service nova] Lock "59f6adc7-d491-4a86-83f7-89128511e00f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.217647] env[62109]: DEBUG nova.compute.manager [req-89b31008-6a95-4917-b0a4-f94a6f352e68 req-48b47e09-208f-4a84-a4ee-bda2c838c1cf service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] No waiting events found dispatching network-vif-plugged-e13a10c2-836a-412a-b1af-974b816d3971 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 795.217824] env[62109]: WARNING nova.compute.manager [req-89b31008-6a95-4917-b0a4-f94a6f352e68 req-48b47e09-208f-4a84-a4ee-bda2c838c1cf service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Received unexpected event network-vif-plugged-e13a10c2-836a-412a-b1af-974b816d3971 for instance with vm_state building and task_state spawning. [ 795.255930] env[62109]: ERROR nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [req-781d472c-23ce-4b07-8c05-4228f70db761] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-781d472c-23ce-4b07-8c05-4228f70db761"}]} [ 795.261269] env[62109]: DEBUG nova.network.neutron [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 795.269856] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bb0cacb6-614e-4758-89cd-892a8eb7cd0a tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.770s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.287250] env[62109]: DEBUG nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 795.315718] env[62109]: DEBUG nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 795.315951] env[62109]: DEBUG nova.compute.provider_tree [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 795.332298] env[62109]: DEBUG nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 795.361132] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 
tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116420, 'name': Rename_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.369435] env[62109]: DEBUG nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 795.479259] env[62109]: DEBUG nova.network.neutron [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Updating instance_info_cache with network_info: [{"id": "e13a10c2-836a-412a-b1af-974b816d3971", "address": "fa:16:3e:09:0a:3a", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a10c2-83", "ovs_interfaceid": "e13a10c2-836a-412a-b1af-974b816d3971", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.657869] env[62109]: DEBUG oslo_concurrency.lockutils [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "028300fd-f9f8-4606-a39e-53582f830eeb" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.658162] env[62109]: DEBUG oslo_concurrency.lockutils [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "028300fd-f9f8-4606-a39e-53582f830eeb" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.658882] env[62109]: INFO nova.compute.manager [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Attaching volume 7843924b-bbc9-4f55-aacc-c4366b358390 to /dev/sdb [ 795.695095] env[62109]: DEBUG 
nova.compute.manager [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 795.706205] env[62109]: DEBUG nova.network.neutron [-] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.733255] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92ce817-b980-4917-8b85-e4666ae11715 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.751980] env[62109]: DEBUG nova.virt.hardware [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 795.753181] env[62109]: DEBUG nova.virt.hardware [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 795.753181] env[62109]: DEBUG nova.virt.hardware [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 795.753181] env[62109]: DEBUG nova.virt.hardware [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 795.753181] env[62109]: DEBUG nova.virt.hardware [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 795.753181] env[62109]: DEBUG nova.virt.hardware [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 795.753574] env[62109]: DEBUG nova.virt.hardware [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 795.753574] env[62109]: DEBUG nova.virt.hardware [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 795.753878] env[62109]: DEBUG nova.virt.hardware [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 795.753950] env[62109]: DEBUG nova.virt.hardware [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 795.754112] env[62109]: DEBUG nova.virt.hardware [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 795.756026] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad37fbf4-7c64-4a51-949b-52b5b331f99f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.761802] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7daade-54b9-4da5-90bf-846b10830f58 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.771575] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0787505-41e6-4ec8-b5a1-35d095e548ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.783184] env[62109]: DEBUG nova.virt.block_device [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Updating existing volume attachment record: 2fa057b9-31d8-4e0f-bd96-48a82c647da6 {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 795.861522] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116420, 'name': Rename_Task, 'duration_secs': 1.287827} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.863830] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 795.869039] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5b18149-a33c-4c9d-a397-5846c80604b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.870742] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "7ace6356-1a81-4095-8286-c9b6d829062b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.871080] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "7ace6356-1a81-4095-8286-c9b6d829062b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.883915] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Waiting for the task: (returnval){ [ 795.883915] env[62109]: value = "task-1116421" [ 795.883915] env[62109]: _type = "Task" [ 795.883915] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.910719] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116421, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.961176] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2991dbba-07f0-4c32-92c3-e09fb83d608c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.980296] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a090bf8d-a151-4b5b-8078-486b5d8c6614 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.985081] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "refresh_cache-59f6adc7-d491-4a86-83f7-89128511e00f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.985496] env[62109]: DEBUG nova.compute.manager [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Instance network_info: |[{"id": "e13a10c2-836a-412a-b1af-974b816d3971", "address": "fa:16:3e:09:0a:3a", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a10c2-83", "ovs_interfaceid": "e13a10c2-836a-412a-b1af-974b816d3971", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 795.987016] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:0a:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7cd4cea-788c-4e6d-9df8-5d83838e2e2a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e13a10c2-836a-412a-b1af-974b816d3971', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 795.994708] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Creating folder: Project (f94fd7a82dc0489597534c518365971b). Parent ref: group-v244329. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 795.995593] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2cd0b0f9-d92b-4ef7-9b7d-aa2329350fa0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.029679] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797c4828-08f3-4031-ba84-9529547b1437 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.035729] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Created folder: Project (f94fd7a82dc0489597534c518365971b) in parent group-v244329. [ 796.035729] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Creating folder: Instances. Parent ref: group-v244424. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 796.035729] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aca095c8-f91c-4940-bd5e-71004d668c9a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.040999] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a371dac-1605-442c-9258-321fc90dcc3a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.048388] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Created folder: Instances in parent group-v244424. [ 796.048742] env[62109]: DEBUG oslo.service.loopingcall [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 796.049299] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 796.049559] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-657eb814-3cdc-40cc-b0c4-e494751de84d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.076854] env[62109]: DEBUG nova.compute.provider_tree [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 796.093470] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 796.093470] env[62109]: value = "task-1116426" [ 796.093470] env[62109]: _type = "Task" [ 796.093470] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.097627] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116426, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.184049] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39e1eb8a-bab5-4570-b26a-bc3b4d75f21b tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "1399f618-3a93-4731-a59b-f98306d6cd52" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.184283] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39e1eb8a-bab5-4570-b26a-bc3b4d75f21b tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.184700] env[62109]: DEBUG nova.compute.manager [None req-39e1eb8a-bab5-4570-b26a-bc3b4d75f21b tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 796.185579] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7516115-41af-4ead-9274-ee7f7ba9c7e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.196054] env[62109]: DEBUG nova.compute.manager [None req-39e1eb8a-bab5-4570-b26a-bc3b4d75f21b tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] 
[instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62109) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 796.196622] env[62109]: DEBUG nova.objects.instance [None req-39e1eb8a-bab5-4570-b26a-bc3b4d75f21b tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'flavor' on Instance uuid 1399f618-3a93-4731-a59b-f98306d6cd52 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 796.210519] env[62109]: INFO nova.compute.manager [-] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Took 1.91 seconds to deallocate network for instance. [ 796.377391] env[62109]: DEBUG nova.compute.manager [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 796.398514] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116421, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.610034] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116426, 'name': CreateVM_Task, 'duration_secs': 0.455935} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.611234] env[62109]: ERROR nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [req-885fb961-2056-4b53-8af2-d1528d3c6b9b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-885fb961-2056-4b53-8af2-d1528d3c6b9b"}]} [ 796.611800] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 796.616190] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.616954] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.617267] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 796.617716] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c3facdc-27e3-4c97-8bd5-6b3f30c7566a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.626330] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 796.626330] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524f2f8f-7acb-9ff2-4124-67a6bf445943" [ 796.626330] env[62109]: _type = "Task" [ 796.626330] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.640874] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524f2f8f-7acb-9ff2-4124-67a6bf445943, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.641960] env[62109]: DEBUG nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 796.665341] env[62109]: DEBUG nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 796.665590] env[62109]: DEBUG nova.compute.provider_tree [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 796.684381] env[62109]: DEBUG nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 796.705452] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e1eb8a-bab5-4570-b26a-bc3b4d75f21b tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 796.705452] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f0619d4-5b55-4a61-9a83-74e706a46145 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.710597] env[62109]: DEBUG nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 796.714422] 
env[62109]: DEBUG nova.network.neutron [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Successfully updated port: a11a7ca2-7088-4194-a63f-e4a9ed75ecc0 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 796.719338] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.719866] env[62109]: DEBUG oslo_vmware.api [None req-39e1eb8a-bab5-4570-b26a-bc3b4d75f21b tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 796.719866] env[62109]: value = "task-1116428" [ 796.719866] env[62109]: _type = "Task" [ 796.719866] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.733939] env[62109]: DEBUG oslo_vmware.api [None req-39e1eb8a-bab5-4570-b26a-bc3b4d75f21b tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116428, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.909197] env[62109]: DEBUG oslo_vmware.api [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116421, 'name': PowerOnVM_Task, 'duration_secs': 0.647021} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.909197] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 796.909197] env[62109]: INFO nova.compute.manager [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Took 11.09 seconds to spawn the instance on the hypervisor. 
[ 796.909197] env[62109]: DEBUG nova.compute.manager [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 796.909197] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33321ceb-df16-4864-985c-c5c0aff6b036 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.920631] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.143821] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524f2f8f-7acb-9ff2-4124-67a6bf445943, 'name': SearchDatastore_Task, 'duration_secs': 0.022635} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.144469] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.144672] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 797.145026] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.145525] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.145525] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 797.145926] env[62109]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-171db87c-68b7-455e-81a9-a0fc64736b50 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.160079] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 797.160956] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 797.161726] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ad238ae-2cd4-4c6f-bba6-c824f3233d47 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.170492] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 797.170492] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5260c82a-d4cb-2794-6c52-87d5ab7c2a80" [ 797.170492] env[62109]: _type = "Task" [ 797.170492] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.184314] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5260c82a-d4cb-2794-6c52-87d5ab7c2a80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.224962] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.225628] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.225628] env[62109]: DEBUG nova.network.neutron [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 797.248357] env[62109]: DEBUG oslo_vmware.api [None req-39e1eb8a-bab5-4570-b26a-bc3b4d75f21b tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116428, 'name': PowerOffVM_Task, 'duration_secs': 0.487466} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.248734] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e1eb8a-bab5-4570-b26a-bc3b4d75f21b tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 797.248946] env[62109]: DEBUG nova.compute.manager [None req-39e1eb8a-bab5-4570-b26a-bc3b4d75f21b tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 797.250641] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e6bd4f-75aa-4709-a5a1-1c99baf0bd6d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.308431] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5872ec-08da-4a9a-a705-44736241a998 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.320647] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87202bce-da94-47e4-8785-f2e816eef976 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.358480] env[62109]: DEBUG nova.compute.manager [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 797.359591] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1864d780-7b28-4ddc-b4b5-c7061611c6e3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.362884] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29e5497-9c67-4f6f-9ef7-3cf3b913154e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.374704] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35032467-9b24-4126-a1ca-97c8d47e55a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.397691] env[62109]: DEBUG nova.compute.provider_tree [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 797.438500] env[62109]: INFO nova.compute.manager [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Took 39.18 seconds to build instance. [ 797.683645] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5260c82a-d4cb-2794-6c52-87d5ab7c2a80, 'name': SearchDatastore_Task, 'duration_secs': 0.030587} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.684582] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62eae0fe-a9a4-46a4-bf26-821a435d870c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.693061] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 797.693061] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e7b4a1-71da-9531-ce4c-cb4d292e7984" [ 797.693061] env[62109]: _type = "Task" [ 797.693061] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.704877] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e7b4a1-71da-9531-ce4c-cb4d292e7984, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.740471] env[62109]: DEBUG nova.compute.manager [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Received event network-changed-e13a10c2-836a-412a-b1af-974b816d3971 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 797.740723] env[62109]: DEBUG nova.compute.manager [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Refreshing instance network info cache due to event network-changed-e13a10c2-836a-412a-b1af-974b816d3971. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 797.742018] env[62109]: DEBUG oslo_concurrency.lockutils [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] Acquiring lock "refresh_cache-59f6adc7-d491-4a86-83f7-89128511e00f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.742018] env[62109]: DEBUG oslo_concurrency.lockutils [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] Acquired lock "refresh_cache-59f6adc7-d491-4a86-83f7-89128511e00f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.742018] env[62109]: DEBUG nova.network.neutron [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Refreshing network info cache for port e13a10c2-836a-412a-b1af-974b816d3971 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 797.760205] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.760676] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.779702] env[62109]: DEBUG nova.network.neutron [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Instance cache missing network info. 
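The "Acquiring lock ..." / "Lock ... acquired ... waited N s" / "... released ... held N s" DEBUG lines above are emitted by oslo.concurrency's locking helpers, which Nova wraps for per-instance and per-resource serialization (the refresh_cache-<instance uuid> and compute_resources locks in this log). The following is a minimal illustrative sketch of that pattern, not Nova's actual code; the lock names are copied from the log and the function bodies are placeholder stubs.

    # Sketch of the oslo.concurrency locking pattern behind the
    # "Acquiring lock ..." / "acquired" / "released" DEBUG lines in this log.
    # Illustrative only; lock names mirror the log, bodies are stubs.
    from oslo_concurrency import lockutils

    INSTANCE_UUID = "7afbb35b-9865-40a7-8b37-d6a661a186a9"  # taken from the log above

    def refresh_network_cache(instance_uuid):
        # Context-manager form: logs the lock being acquired and released
        # around the critical section.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            pass  # rebuild the instance network info cache here

    @lockutils.synchronized("compute_resources")
    def instance_claim():
        # Decorator form: logs how long the caller waited for the lock and
        # how long it was held, matching the "compute_resources" lock taken
        # by ResourceTracker.instance_claim later in this log.
        pass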
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 797.788186] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39e1eb8a-bab5-4570-b26a-bc3b4d75f21b tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.603s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.886843] env[62109]: INFO nova.compute.manager [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] instance snapshotting [ 797.887257] env[62109]: WARNING nova.compute.manager [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 797.892425] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b46ccb6-2b30-4135-9173-57cfbd1af470 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.933051] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ea7708-eab8-437a-bcd5-ea04ff759096 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.937359] env[62109]: ERROR nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [req-cc8884eb-4350-44c1-b3dc-9e64f8ebdbf0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cc8884eb-4350-44c1-b3dc-9e64f8ebdbf0"}]} [ 797.941384] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f1da3a44-022d-40c9-8790-bd80fe85bc62 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Lock "9b2968bb-ed06-4740-b43e-b4aa1fac76dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.637s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.963637] env[62109]: DEBUG nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 797.988684] env[62109]: DEBUG nova.network.neutron [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updating instance_info_cache with network_info: [{"id": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "address": "fa:16:3e:0c:f9:45", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa11a7ca2-70", "ovs_interfaceid": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.996497] env[62109]: DEBUG nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 797.996956] env[62109]: DEBUG nova.compute.provider_tree 
[None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 798.015668] env[62109]: DEBUG nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 798.035340] env[62109]: DEBUG nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 798.207904] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e7b4a1-71da-9531-ce4c-cb4d292e7984, 'name': SearchDatastore_Task, 'duration_secs': 0.021653} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.211080] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.211338] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 59f6adc7-d491-4a86-83f7-89128511e00f/59f6adc7-d491-4a86-83f7-89128511e00f.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 798.211940] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27f120b2-2a20-43a3-9553-62c8783087b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.220385] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 798.220385] env[62109]: value = "task-1116430" [ 798.220385] env[62109]: _type = "Task" [ 798.220385] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.234246] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116430, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.264113] env[62109]: DEBUG nova.compute.manager [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 798.451512] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 798.451894] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-795c274f-3e35-4804-a47c-bc943f4174ac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.466657] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 798.466657] env[62109]: value = "task-1116431" [ 798.466657] env[62109]: _type = "Task" [ 798.466657] env[62109]: } to complete. 
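The "Waiting for the task: (returnval){...} to complete", "progress is N%", and "completed successfully" entries above come from oslo.vmware's task polling. A minimal sketch of that call-and-wait pattern follows; it assumes an already-constructed oslo_vmware.api.VMwareAPISession (constructor arguments omitted) and a VirtualMachine managed-object reference, and it is an illustration rather than Nova's vm_util code.

    # Sketch of the oslo.vmware invoke/poll pattern behind the
    # "Invoking <Object>.<Method>_Task" and "Task: {...} progress is N%" lines.
    # Assumes `session` is an existing oslo_vmware.api.VMwareAPISession and
    # `vm_ref` is a VirtualMachine managed-object reference (both stand-ins).

    def power_off_vm(session, vm_ref):
        # Issues the SOAP call; logged as "Invoking VirtualMachine.PowerOffVM_Task".
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        # Polls the task until it finishes; this produces the periodic
        # "progress is N%" entries and raises an oslo_vmware exception if the
        # task ends in an error state.
        session.wait_for_task(task)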
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.485258] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116431, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.490464] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.491167] env[62109]: DEBUG nova.compute.manager [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Instance network_info: |[{"id": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "address": "fa:16:3e:0c:f9:45", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa11a7ca2-70", "ovs_interfaceid": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 798.494610] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:f9:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b8137fc-f23d-49b1-b19c-3123a5588f34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a11a7ca2-7088-4194-a63f-e4a9ed75ecc0', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 798.505184] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Creating folder: Project (b6ee24c114bd495e8f29eeda1f6b8bba). Parent ref: group-v244329. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 798.505920] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6c5148f-011e-49b3-9166-c8462bcedc65 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.526047] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Created folder: Project (b6ee24c114bd495e8f29eeda1f6b8bba) in parent group-v244329. [ 798.526334] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Creating folder: Instances. Parent ref: group-v244429. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 798.527058] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec1203d8-49ff-4760-b274-4c3b576f37e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.540864] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Created folder: Instances in parent group-v244429. [ 798.541164] env[62109]: DEBUG oslo.service.loopingcall [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 798.541389] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 798.541614] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca30ac1a-e0a7-4124-a6e3-16368407b181 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.563419] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a68edd1-7ad0-46df-be76-784685ac888c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.572705] env[62109]: DEBUG nova.network.neutron [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Updated VIF entry in instance network info cache for port e13a10c2-836a-412a-b1af-974b816d3971. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 798.573119] env[62109]: DEBUG nova.network.neutron [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Updating instance_info_cache with network_info: [{"id": "e13a10c2-836a-412a-b1af-974b816d3971", "address": "fa:16:3e:09:0a:3a", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a10c2-83", "ovs_interfaceid": "e13a10c2-836a-412a-b1af-974b816d3971", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.575465] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c983f2-19e4-4407-87b3-f3f28fc62ffc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.581039] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 798.581039] env[62109]: value = "task-1116434" [ 798.581039] env[62109]: _type = "Task" [ 798.581039] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.615745] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5703347-80a3-40ce-b835-e62d726f3b86 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.625942] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116434, 'name': CreateVM_Task} progress is 15%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.630141] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce5c912-7340-4886-b1df-537321d73d91 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.653158] env[62109]: DEBUG nova.compute.provider_tree [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 798.732265] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116430, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.794633] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.938071] env[62109]: DEBUG nova.compute.manager [req-46d9bef6-e193-471b-a046-84d72f6294da req-da9a3933-94b4-4ced-8d43-d8a3f870dcc3 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Received event network-changed-8c141056-fbc3-4508-a389-9a9ed6566325 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 798.938897] env[62109]: DEBUG nova.compute.manager [req-46d9bef6-e193-471b-a046-84d72f6294da req-da9a3933-94b4-4ced-8d43-d8a3f870dcc3 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Refreshing instance network info cache due to event network-changed-8c141056-fbc3-4508-a389-9a9ed6566325. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 798.941613] env[62109]: DEBUG oslo_concurrency.lockutils [req-46d9bef6-e193-471b-a046-84d72f6294da req-da9a3933-94b4-4ced-8d43-d8a3f870dcc3 service nova] Acquiring lock "refresh_cache-9b2968bb-ed06-4740-b43e-b4aa1fac76dd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.941613] env[62109]: DEBUG oslo_concurrency.lockutils [req-46d9bef6-e193-471b-a046-84d72f6294da req-da9a3933-94b4-4ced-8d43-d8a3f870dcc3 service nova] Acquired lock "refresh_cache-9b2968bb-ed06-4740-b43e-b4aa1fac76dd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.941613] env[62109]: DEBUG nova.network.neutron [req-46d9bef6-e193-471b-a046-84d72f6294da req-da9a3933-94b4-4ced-8d43-d8a3f870dcc3 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Refreshing network info cache for port 8c141056-fbc3-4508-a389-9a9ed6566325 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 798.990764] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116431, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.082056] env[62109]: DEBUG oslo_concurrency.lockutils [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] Releasing lock "refresh_cache-59f6adc7-d491-4a86-83f7-89128511e00f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.082056] env[62109]: DEBUG nova.compute.manager [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Received event network-vif-deleted-28469078-1559-4ee4-93a9-9165165a7b4c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 799.082056] env[62109]: DEBUG nova.compute.manager [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Received event network-vif-plugged-a11a7ca2-7088-4194-a63f-e4a9ed75ecc0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 799.082056] env[62109]: DEBUG oslo_concurrency.lockutils [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] Acquiring lock "7afbb35b-9865-40a7-8b37-d6a661a186a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.082792] env[62109]: DEBUG oslo_concurrency.lockutils [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] Lock "7afbb35b-9865-40a7-8b37-d6a661a186a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.083160] env[62109]: DEBUG oslo_concurrency.lockutils [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] Lock "7afbb35b-9865-40a7-8b37-d6a661a186a9-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.085022] env[62109]: DEBUG nova.compute.manager [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] No waiting events found dispatching network-vif-plugged-a11a7ca2-7088-4194-a63f-e4a9ed75ecc0 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 799.085022] env[62109]: WARNING nova.compute.manager [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Received unexpected event network-vif-plugged-a11a7ca2-7088-4194-a63f-e4a9ed75ecc0 for instance with vm_state building and task_state spawning. [ 799.085022] env[62109]: DEBUG nova.compute.manager [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Received event network-changed-a11a7ca2-7088-4194-a63f-e4a9ed75ecc0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 799.085548] env[62109]: DEBUG nova.compute.manager [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Refreshing instance network info cache due to event network-changed-a11a7ca2-7088-4194-a63f-e4a9ed75ecc0. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 799.089017] env[62109]: DEBUG oslo_concurrency.lockutils [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] Acquiring lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.089017] env[62109]: DEBUG oslo_concurrency.lockutils [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] Acquired lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.089017] env[62109]: DEBUG nova.network.neutron [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Refreshing network info cache for port a11a7ca2-7088-4194-a63f-e4a9ed75ecc0 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 799.102653] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116434, 'name': CreateVM_Task, 'duration_secs': 0.418549} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.104307] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 799.107357] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.107357] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.107357] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 799.107648] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbd1855a-6ce6-4220-a82e-f549bb8cf900 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.116762] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 799.116762] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b23931-a42a-6bc6-a289-f8f3ab5e6456" [ 799.116762] env[62109]: _type = "Task" [ 799.116762] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.128163] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b23931-a42a-6bc6-a289-f8f3ab5e6456, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.184073] env[62109]: ERROR nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [req-21622ef4-3e12-4c0c-9572-2fec11d0248c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-21622ef4-3e12-4c0c-9572-2fec11d0248c"}]} [ 799.184073] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 5.522s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.184418] env[62109]: ERROR nova.compute.manager [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Setting instance vm_state to ERROR: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 574e9717-c25e-453d-8028-45d9e2f95398 (generation 76): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-21622ef4-3e12-4c0c-9572-2fec11d0248c"}]} [ 799.184418] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Traceback (most recent call last): [ 799.184418] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 799.184418] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] self._delete_instance(context, instance, bdms) [ 799.184418] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/nova/nova/compute/manager.py", line 3305, in _delete_instance [ 799.184418] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] self._complete_deletion(context, instance) [ 799.184418] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/nova/nova/compute/manager.py", line 926, in _complete_deletion [ 799.184418] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] self._update_resource_tracker(context, instance) [ 799.184418] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/nova/nova/compute/manager.py", line 693, in _update_resource_tracker [ 799.184767] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] self.rt.update_usage(context, instance, instance.node) [ 799.184767] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 799.184767] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] return f(*args, **kwargs) [ 799.184767] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 702, in update_usage [ 799.184767] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] self._update(context.elevated(), 
self.compute_nodes[nodename]) [ 799.184767] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 799.184767] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] self._update_to_placement(context, compute_node, startup) [ 799.184767] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 799.184767] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 799.184767] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 799.184767] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] raise attempt.get() [ 799.184767] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 799.185180] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] six.reraise(self.value[0], self.value[1], self.value[2]) [ 799.185180] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 719, in reraise [ 799.185180] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] raise value [ 799.185180] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 799.185180] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 799.185180] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 799.185180] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] self.reportclient.update_from_provider_tree( [ 799.185180] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1498, in update_from_provider_tree [ 799.185180] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] self.set_inventory_for_provider( [ 799.185180] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1003, in set_inventory_for_provider [ 799.185180] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] raise exception.ResourceProviderUpdateConflict( [ 799.185573] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 574e9717-c25e-453d-8028-45d9e2f95398 (generation 76): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": 
"req-21622ef4-3e12-4c0c-9572-2fec11d0248c"}]} [ 799.185573] env[62109]: ERROR nova.compute.manager [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] [ 799.189208] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.733s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.191381] env[62109]: INFO nova.compute.claims [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 799.219092] env[62109]: DEBUG nova.objects.instance [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'flavor' on Instance uuid 1399f618-3a93-4731-a59b-f98306d6cd52 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 799.237123] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116430, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51656} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.237123] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 59f6adc7-d491-4a86-83f7-89128511e00f/59f6adc7-d491-4a86-83f7-89128511e00f.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 799.237123] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 799.237123] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c0bcf81-31c8-4019-92a2-d3c28265efb0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.245443] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 799.245443] env[62109]: value = "task-1116435" [ 799.245443] env[62109]: _type = "Task" [ 799.245443] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.258704] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116435, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.355413] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.356131] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.477153] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116431, 'name': CreateSnapshot_Task, 'duration_secs': 0.915088} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.478139] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 799.478371] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73594dad-e295-44f4-a161-1512849aa320 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.629251] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b23931-a42a-6bc6-a289-f8f3ab5e6456, 'name': SearchDatastore_Task, 'duration_secs': 0.014206} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.629587] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.629828] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 799.630080] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.630236] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.630445] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 799.630725] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d8335bc-e9c2-43b4-a3c0-5c4579c89eab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.644314] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 799.644573] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 799.647794] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17c7c661-03f6-4ba6-81b4-f0151dd103da {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.654827] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 799.654827] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527d9f9c-12dd-7b93-8e5a-ba0741218a2f" [ 799.654827] env[62109]: _type = "Task" [ 799.654827] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.665719] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527d9f9c-12dd-7b93-8e5a-ba0741218a2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.692023] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lock "dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.485s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.727671] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.728240] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquired lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.728712] env[62109]: DEBUG nova.network.neutron [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 799.729064] env[62109]: DEBUG nova.objects.instance [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'info_cache' on Instance uuid 1399f618-3a93-4731-a59b-f98306d6cd52 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 799.764736] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116435, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088574} 
completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.766989] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 799.766989] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c889d78-52ab-41ed-add6-6fb632ca4e2e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.793259] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 59f6adc7-d491-4a86-83f7-89128511e00f/59f6adc7-d491-4a86-83f7-89128511e00f.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 799.795796] env[62109]: DEBUG nova.network.neutron [req-46d9bef6-e193-471b-a046-84d72f6294da req-da9a3933-94b4-4ced-8d43-d8a3f870dcc3 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Updated VIF entry in instance network info cache for port 8c141056-fbc3-4508-a389-9a9ed6566325. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 799.795796] env[62109]: DEBUG nova.network.neutron [req-46d9bef6-e193-471b-a046-84d72f6294da req-da9a3933-94b4-4ced-8d43-d8a3f870dcc3 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Updating instance_info_cache with network_info: [{"id": "8c141056-fbc3-4508-a389-9a9ed6566325", "address": "fa:16:3e:3b:fb:62", "network": {"id": "f2e32a40-4ae4-4c71-901c-ff0d4e5fa042", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1356570714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "172388e3349b45c2aa63422749513204", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c141056-fb", "ovs_interfaceid": "8c141056-fbc3-4508-a389-9a9ed6566325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.797253] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d39f178-54be-4d1a-82bc-72485bc40822 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.825554] env[62109]: 
DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 799.825554] env[62109]: value = "task-1116436" [ 799.825554] env[62109]: _type = "Task" [ 799.825554] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.836057] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116436, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.859134] env[62109]: DEBUG nova.compute.manager [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 799.947915] env[62109]: DEBUG nova.network.neutron [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updated VIF entry in instance network info cache for port a11a7ca2-7088-4194-a63f-e4a9ed75ecc0. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 799.948317] env[62109]: DEBUG nova.network.neutron [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updating instance_info_cache with network_info: [{"id": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "address": "fa:16:3e:0c:f9:45", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa11a7ca2-70", "ovs_interfaceid": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.002679] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 800.003426] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with 
opID=oslo.vmware-4c5378f7-d44d-4150-b6bc-83e143142858 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.235515] env[62109]: DEBUG nova.objects.base [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Object Instance<1399f618-3a93-4731-a59b-f98306d6cd52> lazy-loaded attributes: flavor,info_cache {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 800.307078] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 800.307078] env[62109]: value = "task-1116437" [ 800.307078] env[62109]: _type = "Task" [ 800.307078] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.315590] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527d9f9c-12dd-7b93-8e5a-ba0741218a2f, 'name': SearchDatastore_Task, 'duration_secs': 0.013367} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.328548] env[62109]: DEBUG oslo_concurrency.lockutils [req-46d9bef6-e193-471b-a046-84d72f6294da req-da9a3933-94b4-4ced-8d43-d8a3f870dcc3 service nova] Releasing lock "refresh_cache-9b2968bb-ed06-4740-b43e-b4aa1fac76dd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.330200] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6763209d-8618-43ce-9c80-82e9d5ccea72 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.344057] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 800.344057] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52212e8a-d089-2ad0-fc21-65a097fe8cdd" [ 800.344057] env[62109]: _type = "Task" [ 800.344057] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.351969] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116437, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.352735] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116436, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.358529] env[62109]: DEBUG nova.scheduler.client.report [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 800.373558] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52212e8a-d089-2ad0-fc21-65a097fe8cdd, 'name': SearchDatastore_Task, 'duration_secs': 0.016109} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.376369] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.376797] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 7afbb35b-9865-40a7-8b37-d6a661a186a9/7afbb35b-9865-40a7-8b37-d6a661a186a9.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 800.377646] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9967b8a9-dd0f-47f2-9c58-45e419706188 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.380650] env[62109]: DEBUG nova.scheduler.client.report [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 800.380865] env[62109]: DEBUG nova.compute.provider_tree [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 800.391100] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 800.391100] env[62109]: value = "task-1116438" [ 800.391100] env[62109]: _type = "Task" [ 800.391100] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.399509] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.405193] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116438, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.406349] env[62109]: DEBUG nova.scheduler.client.report [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 800.436518] env[62109]: DEBUG nova.scheduler.client.report [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 800.452944] env[62109]: DEBUG oslo_concurrency.lockutils [req-f6b50e1d-8e79-4434-aec6-b68751d31e89 req-137f807d-2216-43e9-8f21-97cefcde3ada service nova] Releasing lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.836090] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116437, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.858329] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116436, 'name': ReconfigVM_Task, 'duration_secs': 0.737715} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.858774] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 59f6adc7-d491-4a86-83f7-89128511e00f/59f6adc7-d491-4a86-83f7-89128511e00f.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 800.860853] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a837034-e6b4-4124-8209-4f9b3d24e5a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.867411] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f96ded-150c-4939-8618-8a49eae38996 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.873381] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Volume attach. Driver type: vmdk {{(pid=62109) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 800.873381] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244428', 'volume_id': '7843924b-bbc9-4f55-aacc-c4366b358390', 'name': 'volume-7843924b-bbc9-4f55-aacc-c4366b358390', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '028300fd-f9f8-4606-a39e-53582f830eeb', 'attached_at': '', 'detached_at': '', 'volume_id': '7843924b-bbc9-4f55-aacc-c4366b358390', 'serial': '7843924b-bbc9-4f55-aacc-c4366b358390'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 800.873609] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3e55cd-fe2d-41a3-b97f-8a1eacd109a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.902489] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22fcd95a-a499-4285-b784-ac3e2da2dfcb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.907973] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 800.907973] env[62109]: value = "task-1116439" [ 800.907973] env[62109]: _type = "Task" [ 800.907973] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.909594] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a86a981-2ec7-455f-86ad-bec44bf3751e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.947465] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] volume-7843924b-bbc9-4f55-aacc-c4366b358390/volume-7843924b-bbc9-4f55-aacc-c4366b358390.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 800.947891] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116438, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.978314] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d248398c-f3a3-43f6-a34b-5b3933fb41c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.999550] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d9f2d0-9c30-4482-aab4-974d03d9c4f0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.002150] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116439, 'name': Rename_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.005046] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527bbb12-6c1e-822a-9bb5-390068c1ea70/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 801.006343] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f3d00e-3d9f-4ac6-a43c-754960345ad2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.013340] env[62109]: DEBUG oslo_vmware.api [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 801.013340] env[62109]: value = "task-1116440" [ 801.013340] env[62109]: _type = "Task" [ 801.013340] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.015755] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223bdeef-a116-4dd6-9417-290003083295 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.024919] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527bbb12-6c1e-822a-9bb5-390068c1ea70/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 801.025124] env[62109]: ERROR oslo_vmware.rw_handles [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527bbb12-6c1e-822a-9bb5-390068c1ea70/disk-0.vmdk due to incomplete transfer. [ 801.026115] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-58f30acd-1a8b-4e17-8095-20414fe3e759 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.039428] env[62109]: DEBUG nova.compute.provider_tree [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 801.045018] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527bbb12-6c1e-822a-9bb5-390068c1ea70/disk-0.vmdk. 
{{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 801.045251] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Uploaded image e68eba09-0579-4426-a954-9275e3a6d751 to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 801.047506] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 801.047809] env[62109]: DEBUG oslo_vmware.api [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116440, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.048945] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a57efcd0-102f-456c-9a21-8ff5879f46c1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.056531] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 801.056531] env[62109]: value = "task-1116441" [ 801.056531] env[62109]: _type = "Task" [ 801.056531] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.066818] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116441, 'name': Destroy_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.326341] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116437, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.331736] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.404920] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116438, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.665343} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.405242] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 7afbb35b-9865-40a7-8b37-d6a661a186a9/7afbb35b-9865-40a7-8b37-d6a661a186a9.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 801.405471] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 801.405733] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5f88f974-513c-4eb4-9914-377335f42917 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.418320] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 801.418320] env[62109]: value = "task-1116442" [ 801.418320] env[62109]: _type = "Task" [ 801.418320] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.433781] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116439, 'name': Rename_Task, 'duration_secs': 0.282001} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.438552] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 801.438552] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116442, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.438895] env[62109]: DEBUG nova.network.neutron [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Updating instance_info_cache with network_info: [{"id": "76f15b7e-4103-4568-8042-248ee15513dc", "address": "fa:16:3e:81:0c:19", "network": {"id": "66a020c3-cdbc-464e-83aa-02e9126e8492", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1240081161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bff923ccb02449aa834523a0652cbdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f15b7e-41", "ovs_interfaceid": "76f15b7e-4103-4568-8042-248ee15513dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.440466] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a101cec5-6fb5-47d2-96fc-98f259afdfb2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.451356] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 801.451356] env[62109]: value = "task-1116443" [ 801.451356] env[62109]: _type = "Task" [ 801.451356] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.462262] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116443, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.533777] env[62109]: DEBUG oslo_vmware.api [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116440, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.567456] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116441, 'name': Destroy_Task, 'duration_secs': 0.379382} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.567778] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Destroyed the VM [ 801.568037] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 801.568346] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5d85d860-a6e3-437e-a424-272796d5fdce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.576091] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 801.576091] env[62109]: value = "task-1116444" [ 801.576091] env[62109]: _type = "Task" [ 801.576091] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.580261] env[62109]: DEBUG nova.scheduler.client.report [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 77 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 801.580523] env[62109]: DEBUG nova.compute.provider_tree [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 77 to 78 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 801.580714] env[62109]: DEBUG nova.compute.provider_tree [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 801.589627] env[62109]: DEBUG oslo_vmware.api [None 
req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116444, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.827721] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116437, 'name': CloneVM_Task, 'duration_secs': 1.729651} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.828390] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Created linked-clone VM from snapshot [ 801.828798] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb6798c-32de-4588-b7c0-5a25cb201dc3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.842019] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Uploading image 37d29dc5-a20b-4867-affe-5b6316438d64 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 801.863482] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 801.863482] env[62109]: value = "vm-244433" [ 801.863482] env[62109]: _type = "VirtualMachine" [ 801.863482] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 801.863634] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e519adce-2c5e-4d55-9a7a-08be630efbcd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.874053] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lease: (returnval){ [ 801.874053] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52bcc40e-b1d2-b330-e8fd-decbc617813c" [ 801.874053] env[62109]: _type = "HttpNfcLease" [ 801.874053] env[62109]: } obtained for exporting VM: (result){ [ 801.874053] env[62109]: value = "vm-244433" [ 801.874053] env[62109]: _type = "VirtualMachine" [ 801.874053] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 801.874053] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the lease: (returnval){ [ 801.874053] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52bcc40e-b1d2-b330-e8fd-decbc617813c" [ 801.874053] env[62109]: _type = "HttpNfcLease" [ 801.874053] env[62109]: } to be ready. 
{{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 801.881759] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 801.881759] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52bcc40e-b1d2-b330-e8fd-decbc617813c" [ 801.881759] env[62109]: _type = "HttpNfcLease" [ 801.881759] env[62109]: } is initializing. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 801.932764] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116442, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092449} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.933312] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 801.933962] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1d22f6-aa3f-4832-8d65-b4452e857c75 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.951307] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Releasing lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.962654] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 7afbb35b-9865-40a7-8b37-d6a661a186a9/7afbb35b-9865-40a7-8b37-d6a661a186a9.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 801.967838] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af0543b3-e561-43f8-904e-b17b59c42420 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.989612] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116443, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.991288] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 801.991288] env[62109]: value = "task-1116446" [ 801.991288] env[62109]: _type = "Task" [ 801.991288] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.001233] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116446, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.031892] env[62109]: DEBUG oslo_vmware.api [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116440, 'name': ReconfigVM_Task, 'duration_secs': 0.528432} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.032229] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Reconfigured VM instance instance-00000029 to attach disk [datastore2] volume-7843924b-bbc9-4f55-aacc-c4366b358390/volume-7843924b-bbc9-4f55-aacc-c4366b358390.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 802.037201] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1304999a-22e9-4288-8ae2-67bfd77f4b51 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.055773] env[62109]: DEBUG oslo_vmware.api [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 802.055773] env[62109]: value = "task-1116447" [ 802.055773] env[62109]: _type = "Task" [ 802.055773] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.065516] env[62109]: DEBUG oslo_vmware.api [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116447, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.088657] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.899s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.089255] env[62109]: DEBUG nova.compute.manager [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 802.093241] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116444, 'name': RemoveSnapshot_Task} progress is 16%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.093241] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.632s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.093402] env[62109]: DEBUG nova.objects.instance [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lazy-loading 'resources' on Instance uuid c90ace77-5b8b-4b04-aa57-d47ad17df01e {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 802.384980] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 802.384980] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52bcc40e-b1d2-b330-e8fd-decbc617813c" [ 802.384980] env[62109]: _type = "HttpNfcLease" [ 802.384980] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 802.385362] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 802.385362] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52bcc40e-b1d2-b330-e8fd-decbc617813c" [ 802.385362] env[62109]: _type = "HttpNfcLease" [ 802.385362] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 802.386140] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee2685c-0754-4ef6-94ec-53416a5399aa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.396083] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ec7c10-01ae-32a0-9ed2-7ae7dbec0b8c/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 802.396288] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ec7c10-01ae-32a0-9ed2-7ae7dbec0b8c/disk-0.vmdk for reading. 
{{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 802.465055] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 802.465337] env[62109]: DEBUG oslo_vmware.api [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116443, 'name': PowerOnVM_Task, 'duration_secs': 0.639258} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.465554] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70a4b23d-5bce-4fca-a7b8-f3c7f7514897 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.467339] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 802.467579] env[62109]: INFO nova.compute.manager [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Took 9.59 seconds to spawn the instance on the hypervisor. [ 802.467777] env[62109]: DEBUG nova.compute.manager [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 802.468586] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612337b4-4d77-4ef6-bc26-bac341b82bbf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.479976] env[62109]: DEBUG oslo_vmware.api [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 802.479976] env[62109]: value = "task-1116448" [ 802.479976] env[62109]: _type = "Task" [ 802.479976] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.488676] env[62109]: DEBUG oslo_vmware.api [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116448, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.502781] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116446, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.505932] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2636fe67-330e-41e0-9566-a1a2b2ad6879 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.565980] env[62109]: DEBUG oslo_vmware.api [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116447, 'name': ReconfigVM_Task, 'duration_secs': 0.236949} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.566349] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244428', 'volume_id': '7843924b-bbc9-4f55-aacc-c4366b358390', 'name': 'volume-7843924b-bbc9-4f55-aacc-c4366b358390', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '028300fd-f9f8-4606-a39e-53582f830eeb', 'attached_at': '', 'detached_at': '', 'volume_id': '7843924b-bbc9-4f55-aacc-c4366b358390', 'serial': '7843924b-bbc9-4f55-aacc-c4366b358390'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 802.587524] env[62109]: DEBUG oslo_vmware.api [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116444, 'name': RemoveSnapshot_Task, 'duration_secs': 0.970672} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.587823] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 802.588073] env[62109]: INFO nova.compute.manager [None req-f2a328d3-a608-4a95-bcc7-8c102b7868fb tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Took 15.62 seconds to snapshot the instance on the hypervisor. [ 802.594022] env[62109]: DEBUG nova.compute.utils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 802.595573] env[62109]: DEBUG nova.compute.manager [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 802.595573] env[62109]: DEBUG nova.network.neutron [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 802.651458] env[62109]: DEBUG nova.policy [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5bc942fb85a46daa359c9fae576d4d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b70ad018a9443888f3bd51b32782554', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 802.994971] env[62109]: DEBUG nova.network.neutron [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Successfully created port: f5f52514-4146-44d3-9e0e-5ee87f782604 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.010729] env[62109]: INFO nova.compute.manager [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Took 32.43 seconds to build instance. [ 803.020171] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116446, 'name': ReconfigVM_Task, 'duration_secs': 0.674028} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.026108] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 7afbb35b-9865-40a7-8b37-d6a661a186a9/7afbb35b-9865-40a7-8b37-d6a661a186a9.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 803.027113] env[62109]: DEBUG oslo_vmware.api [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116448, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.028420] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-abae6543-d450-4c86-bb2c-092f6128ab75 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.038240] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 803.038240] env[62109]: value = "task-1116449" [ 803.038240] env[62109]: _type = "Task" [ 803.038240] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.056685] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116449, 'name': Rename_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.074242] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa053f4-b8d5-421a-ab93-d812e653915f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.084036] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675676f5-6e46-4794-bce2-4fb0a162afc4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.127894] env[62109]: DEBUG nova.compute.manager [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 803.135542] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60cba2d3-37aa-4264-a83b-aba842d73ae0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.148604] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6005dd-3607-4c0c-9770-248c901da894 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.168177] env[62109]: DEBUG nova.compute.provider_tree [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 803.498483] env[62109]: DEBUG oslo_vmware.api [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116448, 'name': PowerOnVM_Task, 'duration_secs': 0.912234} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.499083] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 803.499630] env[62109]: DEBUG nova.compute.manager [None req-f3263653-4520-4459-89f8-69ace9fa6f49 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 803.500743] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1778706a-5bcb-453f-b9b9-5ca3fb3d472b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.512687] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7958bc5b-269f-4011-aaf7-e4bc09a9da13 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "59f6adc7-d491-4a86-83f7-89128511e00f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 123.617s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.554103] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116449, 'name': Rename_Task, 'duration_secs': 0.280006} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.554411] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 803.554718] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d2df185-a4de-4e12-8a6b-347254b2ae8a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.562949] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 803.562949] env[62109]: value = "task-1116450" [ 803.562949] env[62109]: _type = "Task" [ 803.562949] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.582307] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116450, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.645891] env[62109]: DEBUG nova.objects.instance [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lazy-loading 'flavor' on Instance uuid 028300fd-f9f8-4606-a39e-53582f830eeb {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 803.701508] env[62109]: DEBUG nova.scheduler.client.report [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 78 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 803.701919] env[62109]: DEBUG nova.compute.provider_tree [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 78 to 79 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 803.702192] env[62109]: DEBUG nova.compute.provider_tree [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 804.074653] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116450, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.150424] env[62109]: DEBUG nova.compute.manager [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 804.156087] env[62109]: DEBUG oslo_concurrency.lockutils [None req-690ae560-efa3-433d-baf0-0feb5c21b472 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "028300fd-f9f8-4606-a39e-53582f830eeb" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.498s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.174020] env[62109]: DEBUG nova.virt.hardware [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 804.174532] env[62109]: DEBUG nova.virt.hardware [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 804.175638] env[62109]: DEBUG nova.virt.hardware [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 804.175869] env[62109]: DEBUG nova.virt.hardware [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 804.176070] env[62109]: DEBUG nova.virt.hardware [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 804.176286] env[62109]: DEBUG nova.virt.hardware [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 804.176636] env[62109]: DEBUG nova.virt.hardware [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 804.176922] env[62109]: DEBUG nova.virt.hardware [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 804.177193] env[62109]: DEBUG nova.virt.hardware [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 804.177407] env[62109]: DEBUG nova.virt.hardware [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 804.177697] env[62109]: DEBUG nova.virt.hardware [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 804.178944] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283ee8a9-3c95-4339-8c9d-591f916b4e98 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.187856] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6013f139-aa4c-4d7c-9025-234d6aad9bc8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.207423] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.114s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.209905] env[62109]: 
DEBUG oslo_concurrency.lockutils [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.723s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.210175] env[62109]: DEBUG nova.objects.instance [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lazy-loading 'resources' on Instance uuid c44d618e-c781-47ba-b191-cecc01dcfe9b {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 804.303024] env[62109]: INFO nova.scheduler.client.report [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Deleted allocations for instance c90ace77-5b8b-4b04-aa57-d47ad17df01e [ 804.316404] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "59f6adc7-d491-4a86-83f7-89128511e00f" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.316404] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "59f6adc7-d491-4a86-83f7-89128511e00f" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.316404] env[62109]: INFO nova.compute.manager [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Shelving [ 804.576359] env[62109]: DEBUG oslo_vmware.api [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116450, 'name': PowerOnVM_Task, 'duration_secs': 0.747564} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.576763] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 804.577044] env[62109]: INFO nova.compute.manager [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Took 8.88 seconds to spawn the instance on the hypervisor. 
[ 804.577248] env[62109]: DEBUG nova.compute.manager [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 804.578229] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a74e96-b7c5-4703-8f57-a25380aee992 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.682617] env[62109]: DEBUG nova.compute.manager [req-f147170f-2e83-4618-8039-850c5a548242 req-8e8976c9-dba8-4252-8ed7-03650879a971 service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Received event network-vif-plugged-f5f52514-4146-44d3-9e0e-5ee87f782604 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 804.683034] env[62109]: DEBUG oslo_concurrency.lockutils [req-f147170f-2e83-4618-8039-850c5a548242 req-8e8976c9-dba8-4252-8ed7-03650879a971 service nova] Acquiring lock "2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.683325] env[62109]: DEBUG oslo_concurrency.lockutils [req-f147170f-2e83-4618-8039-850c5a548242 req-8e8976c9-dba8-4252-8ed7-03650879a971 service nova] Lock "2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.683591] env[62109]: DEBUG oslo_concurrency.lockutils [req-f147170f-2e83-4618-8039-850c5a548242 req-8e8976c9-dba8-4252-8ed7-03650879a971 service nova] Lock "2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.683873] env[62109]: DEBUG nova.compute.manager [req-f147170f-2e83-4618-8039-850c5a548242 req-8e8976c9-dba8-4252-8ed7-03650879a971 service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] No waiting events found dispatching network-vif-plugged-f5f52514-4146-44d3-9e0e-5ee87f782604 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 804.684102] env[62109]: WARNING nova.compute.manager [req-f147170f-2e83-4618-8039-850c5a548242 req-8e8976c9-dba8-4252-8ed7-03650879a971 service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Received unexpected event network-vif-plugged-f5f52514-4146-44d3-9e0e-5ee87f782604 for instance with vm_state building and task_state spawning. 
[ 804.726300] env[62109]: DEBUG nova.network.neutron [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Successfully updated port: f5f52514-4146-44d3-9e0e-5ee87f782604 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 804.814810] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ff557645-374b-4cdf-8773-22c31173176d tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "c90ace77-5b8b-4b04-aa57-d47ad17df01e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.127s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.828858] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 804.829510] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcf224a0-e6a0-4553-a721-424c2a05be6a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.840058] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 804.840058] env[62109]: value = "task-1116451" [ 804.840058] env[62109]: _type = "Task" [ 804.840058] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.850345] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116451, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.859792] env[62109]: DEBUG oslo_concurrency.lockutils [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "028300fd-f9f8-4606-a39e-53582f830eeb" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.860054] env[62109]: DEBUG oslo_concurrency.lockutils [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "028300fd-f9f8-4606-a39e-53582f830eeb" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.098310] env[62109]: INFO nova.compute.manager [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Took 33.56 seconds to build instance. 
[ 805.197011] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84339222-d737-4dbe-91d5-e3016533b0c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.205492] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba297bc-def7-4d7d-b0ff-61568d499406 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.244569] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Acquiring lock "refresh_cache-2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.244720] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Acquired lock "refresh_cache-2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.244978] env[62109]: DEBUG nova.network.neutron [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 805.248167] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82de967f-e1a9-4bfa-a2a9-e67f232d4530 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.258186] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb370c4-192a-4622-8fc7-271522a90561 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.274104] env[62109]: DEBUG nova.compute.provider_tree [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.357029] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116451, 'name': PowerOffVM_Task, 'duration_secs': 0.219365} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.357029] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 805.357029] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef6f4a5-50d6-4813-8c89-29510e96072f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.379206] env[62109]: INFO nova.compute.manager [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Detaching volume 7843924b-bbc9-4f55-aacc-c4366b358390 [ 805.382464] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe2ffc4-96b6-490b-b926-216dd2f11fac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.426866] env[62109]: INFO nova.virt.block_device [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Attempting to driver detach volume 7843924b-bbc9-4f55-aacc-c4366b358390 from mountpoint /dev/sdb [ 805.428291] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Volume detach. 
Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 805.428460] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244428', 'volume_id': '7843924b-bbc9-4f55-aacc-c4366b358390', 'name': 'volume-7843924b-bbc9-4f55-aacc-c4366b358390', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '028300fd-f9f8-4606-a39e-53582f830eeb', 'attached_at': '', 'detached_at': '', 'volume_id': '7843924b-bbc9-4f55-aacc-c4366b358390', 'serial': '7843924b-bbc9-4f55-aacc-c4366b358390'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 805.432423] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89253d18-53ab-4ce3-9f79-33e00e36cc4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.457244] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c171782e-1c2d-4918-8aae-bf839b1c74f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.466041] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89c2678-e0c5-44db-97ac-1d4750cc76f8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.487324] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f231b72-bc5c-4782-8753-0acd38f02931 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.502994] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] The volume has not been displaced from its original location: [datastore2] volume-7843924b-bbc9-4f55-aacc-c4366b358390/volume-7843924b-bbc9-4f55-aacc-c4366b358390.vmdk. No consolidation needed. 
{{(pid=62109) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 805.510314] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Reconfiguring VM instance instance-00000029 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 805.510314] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b55ff37-561f-4e10-91d8-3d9eb7b2d42b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.527552] env[62109]: DEBUG oslo_vmware.api [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 805.527552] env[62109]: value = "task-1116452" [ 805.527552] env[62109]: _type = "Task" [ 805.527552] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.536483] env[62109]: DEBUG oslo_vmware.api [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116452, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.600819] env[62109]: DEBUG oslo_concurrency.lockutils [None req-560e607f-fb80-460b-bf7d-6325c74fe1f4 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.347s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.776634] env[62109]: DEBUG nova.scheduler.client.report [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 805.782987] env[62109]: DEBUG nova.network.neutron [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 805.898322] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 805.901449] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-07d7fbe1-7384-4e4e-a1bd-49decfb9cb91 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.907747] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 805.907747] env[62109]: value = "task-1116453" [ 805.907747] env[62109]: _type = "Task" [ 805.907747] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.918486] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116453, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.937028] env[62109]: DEBUG nova.network.neutron [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Updating instance_info_cache with network_info: [{"id": "f5f52514-4146-44d3-9e0e-5ee87f782604", "address": "fa:16:3e:b3:a9:5a", "network": {"id": "6370db82-9650-49a7-8ebf-5c3d3e0c00df", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1066537298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b70ad018a9443888f3bd51b32782554", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5f52514-41", "ovs_interfaceid": "f5f52514-4146-44d3-9e0e-5ee87f782604", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.041273] env[62109]: DEBUG oslo_vmware.api [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116452, 'name': ReconfigVM_Task, 'duration_secs': 0.249448} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.041667] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Reconfigured VM instance instance-00000029 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 806.049567] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd2cc382-0817-47d5-b610-4fa7fb940d31 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.078021] env[62109]: DEBUG oslo_vmware.api [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 806.078021] env[62109]: value = "task-1116454" [ 806.078021] env[62109]: _type = "Task" [ 806.078021] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.085072] env[62109]: DEBUG oslo_vmware.api [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116454, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.282696] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.073s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.289459] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.652s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.291450] env[62109]: INFO nova.compute.claims [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 806.309734] env[62109]: INFO nova.scheduler.client.report [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Deleted allocations for instance c44d618e-c781-47ba-b191-cecc01dcfe9b [ 806.419206] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116453, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.443064] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Releasing lock "refresh_cache-2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.443064] env[62109]: DEBUG nova.compute.manager [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Instance network_info: |[{"id": "f5f52514-4146-44d3-9e0e-5ee87f782604", "address": "fa:16:3e:b3:a9:5a", "network": {"id": "6370db82-9650-49a7-8ebf-5c3d3e0c00df", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1066537298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b70ad018a9443888f3bd51b32782554", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5f52514-41", "ovs_interfaceid": "f5f52514-4146-44d3-9e0e-5ee87f782604", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 806.443378] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:a9:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5f52514-4146-44d3-9e0e-5ee87f782604', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 806.450368] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Creating folder: Project (1b70ad018a9443888f3bd51b32782554). Parent ref: group-v244329. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 806.450939] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9c9e859-6d68-45c1-b973-88edd519ebcb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.459127] env[62109]: DEBUG nova.compute.manager [req-66c584bf-ad25-4db0-ab7b-7631e08c0e22 req-2bef54a5-f3f6-4a37-b46a-fba7759001ba service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Received event network-changed-a11a7ca2-7088-4194-a63f-e4a9ed75ecc0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 806.459322] env[62109]: DEBUG nova.compute.manager [req-66c584bf-ad25-4db0-ab7b-7631e08c0e22 req-2bef54a5-f3f6-4a37-b46a-fba7759001ba service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Refreshing instance network info cache due to event network-changed-a11a7ca2-7088-4194-a63f-e4a9ed75ecc0. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 806.459549] env[62109]: DEBUG oslo_concurrency.lockutils [req-66c584bf-ad25-4db0-ab7b-7631e08c0e22 req-2bef54a5-f3f6-4a37-b46a-fba7759001ba service nova] Acquiring lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.459741] env[62109]: DEBUG oslo_concurrency.lockutils [req-66c584bf-ad25-4db0-ab7b-7631e08c0e22 req-2bef54a5-f3f6-4a37-b46a-fba7759001ba service nova] Acquired lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.459924] env[62109]: DEBUG nova.network.neutron [req-66c584bf-ad25-4db0-ab7b-7631e08c0e22 req-2bef54a5-f3f6-4a37-b46a-fba7759001ba service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Refreshing network info cache for port a11a7ca2-7088-4194-a63f-e4a9ed75ecc0 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 806.464016] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Created folder: Project (1b70ad018a9443888f3bd51b32782554) in parent group-v244329. [ 806.464016] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Creating folder: Instances. Parent ref: group-v244434. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 806.464016] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65d4fc94-cbbf-455a-8311-a8da291225d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.473611] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Created folder: Instances in parent group-v244434. [ 806.473870] env[62109]: DEBUG oslo.service.loopingcall [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 806.474490] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 806.474490] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47c1f64b-1145-4ea6-b3ba-8c95cbd9cf1a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.493473] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 806.493473] env[62109]: value = "task-1116457" [ 806.493473] env[62109]: _type = "Task" [ 806.493473] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.501238] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116457, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.575960] env[62109]: DEBUG oslo_concurrency.lockutils [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "a9fb75d5-e303-4f31-888d-528963ab23b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.576344] env[62109]: DEBUG oslo_concurrency.lockutils [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "a9fb75d5-e303-4f31-888d-528963ab23b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.577550] env[62109]: DEBUG oslo_concurrency.lockutils [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "a9fb75d5-e303-4f31-888d-528963ab23b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.577550] env[62109]: DEBUG oslo_concurrency.lockutils [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "a9fb75d5-e303-4f31-888d-528963ab23b7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.577550] env[62109]: DEBUG oslo_concurrency.lockutils [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "a9fb75d5-e303-4f31-888d-528963ab23b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.579753] env[62109]: INFO nova.compute.manager [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c 
tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Terminating instance [ 806.586809] env[62109]: DEBUG nova.compute.manager [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 806.587081] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 806.587954] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43cb5887-0f1f-41ef-8f08-9339653906a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.597427] env[62109]: DEBUG oslo_vmware.api [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116454, 'name': ReconfigVM_Task, 'duration_secs': 0.194803} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.600283] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244428', 'volume_id': '7843924b-bbc9-4f55-aacc-c4366b358390', 'name': 'volume-7843924b-bbc9-4f55-aacc-c4366b358390', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '028300fd-f9f8-4606-a39e-53582f830eeb', 'attached_at': '', 'detached_at': '', 'volume_id': '7843924b-bbc9-4f55-aacc-c4366b358390', 'serial': '7843924b-bbc9-4f55-aacc-c4366b358390'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 806.603184] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 806.604212] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b078ddd6-4c9b-4ab7-8588-b38a06581018 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.611346] env[62109]: DEBUG oslo_vmware.api [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 806.611346] env[62109]: value = "task-1116458" [ 806.611346] env[62109]: _type = "Task" [ 806.611346] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.623866] env[62109]: DEBUG oslo_vmware.api [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116458, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.725925] env[62109]: DEBUG nova.compute.manager [req-4f4eec01-f0aa-4155-9b1b-6ba39a01936b req-f79254a9-312f-40d3-b5f7-39472a735728 service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Received event network-changed-f5f52514-4146-44d3-9e0e-5ee87f782604 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 806.726313] env[62109]: DEBUG nova.compute.manager [req-4f4eec01-f0aa-4155-9b1b-6ba39a01936b req-f79254a9-312f-40d3-b5f7-39472a735728 service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Refreshing instance network info cache due to event network-changed-f5f52514-4146-44d3-9e0e-5ee87f782604. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 806.726509] env[62109]: DEBUG oslo_concurrency.lockutils [req-4f4eec01-f0aa-4155-9b1b-6ba39a01936b req-f79254a9-312f-40d3-b5f7-39472a735728 service nova] Acquiring lock "refresh_cache-2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.726760] env[62109]: DEBUG oslo_concurrency.lockutils [req-4f4eec01-f0aa-4155-9b1b-6ba39a01936b req-f79254a9-312f-40d3-b5f7-39472a735728 service nova] Acquired lock "refresh_cache-2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.727076] env[62109]: DEBUG nova.network.neutron [req-4f4eec01-f0aa-4155-9b1b-6ba39a01936b req-f79254a9-312f-40d3-b5f7-39472a735728 service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Refreshing network info cache for port f5f52514-4146-44d3-9e0e-5ee87f782604 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 806.818488] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0b329fff-65fb-4d80-b215-e737acb2dcc9 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "c44d618e-c781-47ba-b191-cecc01dcfe9b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.021s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.922065] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116453, 'name': CreateSnapshot_Task, 'duration_secs': 0.740846} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.922065] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 806.922943] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45e49d1-2918-4394-9d53-2383bcb3d3dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.005449] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116457, 'name': CreateVM_Task, 'duration_secs': 0.482398} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.005630] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 807.006763] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.006958] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.007527] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 807.007602] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4bb2710-5ab4-4d4b-80ac-8112378dde59 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.012234] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Waiting for the task: (returnval){ [ 807.012234] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5228d458-8a73-a927-90b5-fea84f4b3894" [ 807.012234] env[62109]: _type = "Task" [ 807.012234] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.020276] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5228d458-8a73-a927-90b5-fea84f4b3894, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.125416] env[62109]: DEBUG oslo_vmware.api [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116458, 'name': PowerOffVM_Task, 'duration_secs': 0.274171} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.125718] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 807.125858] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 807.126129] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d7d36df-3fbe-4f1a-b1f1-5f0ff5ebbef9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.175681] env[62109]: DEBUG nova.objects.instance [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lazy-loading 'flavor' on Instance uuid 028300fd-f9f8-4606-a39e-53582f830eeb {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 807.194583] env[62109]: DEBUG nova.network.neutron [req-66c584bf-ad25-4db0-ab7b-7631e08c0e22 req-2bef54a5-f3f6-4a37-b46a-fba7759001ba service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updated VIF entry in instance network info cache for port a11a7ca2-7088-4194-a63f-e4a9ed75ecc0. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 807.194965] env[62109]: DEBUG nova.network.neutron [req-66c584bf-ad25-4db0-ab7b-7631e08c0e22 req-2bef54a5-f3f6-4a37-b46a-fba7759001ba service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updating instance_info_cache with network_info: [{"id": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "address": "fa:16:3e:0c:f9:45", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa11a7ca2-70", "ovs_interfaceid": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.442942] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 807.446093] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-80b5f7a6-f0f9-4a25-9719-df173578b81f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.459869] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 807.459869] env[62109]: value = "task-1116460" [ 807.459869] env[62109]: _type = "Task" [ 807.459869] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.473472] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116460, 'name': CloneVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.504295] env[62109]: DEBUG nova.network.neutron [req-4f4eec01-f0aa-4155-9b1b-6ba39a01936b req-f79254a9-312f-40d3-b5f7-39472a735728 service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Updated VIF entry in instance network info cache for port f5f52514-4146-44d3-9e0e-5ee87f782604. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 807.506709] env[62109]: DEBUG nova.network.neutron [req-4f4eec01-f0aa-4155-9b1b-6ba39a01936b req-f79254a9-312f-40d3-b5f7-39472a735728 service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Updating instance_info_cache with network_info: [{"id": "f5f52514-4146-44d3-9e0e-5ee87f782604", "address": "fa:16:3e:b3:a9:5a", "network": {"id": "6370db82-9650-49a7-8ebf-5c3d3e0c00df", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1066537298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b70ad018a9443888f3bd51b32782554", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5f52514-41", "ovs_interfaceid": "f5f52514-4146-44d3-9e0e-5ee87f782604", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.533875] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5228d458-8a73-a927-90b5-fea84f4b3894, 'name': SearchDatastore_Task, 'duration_secs': 0.018256} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.534383] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.534844] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 807.535219] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.535498] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.535818] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 807.540113] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d0ae74b-d13f-45eb-9e75-34858b1a9326 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.550102] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 807.550329] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 807.551768] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33c8354f-e740-481e-a011-63dc8253118b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.558489] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Waiting for the task: (returnval){ [ 807.558489] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527fc0b0-10ec-78af-cd33-ed4f4bed82ab" [ 807.558489] env[62109]: _type = "Task" [ 807.558489] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.569231] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527fc0b0-10ec-78af-cd33-ed4f4bed82ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.697641] env[62109]: DEBUG oslo_concurrency.lockutils [req-66c584bf-ad25-4db0-ab7b-7631e08c0e22 req-2bef54a5-f3f6-4a37-b46a-fba7759001ba service nova] Releasing lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.733433] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a205564-a23f-410c-8c38-ddacbacfcee5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.742832] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6588e6b7-2e01-4099-8909-9323dd6e4112 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.774629] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020bcedb-2fa7-4439-b7fa-16e2f269687e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.782570] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff2805f-bea8-4af6-88c7-31ad60ae3013 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.798181] env[62109]: DEBUG nova.compute.provider_tree [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.973584] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116460, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.976938] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "b1321874-8f97-4444-9f9c-d586d51a9e92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.977191] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "b1321874-8f97-4444-9f9c-d586d51a9e92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.004236] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "d7d1029c-9b7c-4bd7-b606-a1962a129461" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.004547] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "d7d1029c-9b7c-4bd7-b606-a1962a129461" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.008845] env[62109]: DEBUG oslo_concurrency.lockutils [req-4f4eec01-f0aa-4155-9b1b-6ba39a01936b req-f79254a9-312f-40d3-b5f7-39472a735728 service nova] Releasing lock "refresh_cache-2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.072803] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527fc0b0-10ec-78af-cd33-ed4f4bed82ab, 'name': SearchDatastore_Task, 'duration_secs': 0.018891} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.073808] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76a29649-3278-406b-85d4-4144cd1b5e52 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.080302] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Waiting for the task: (returnval){ [ 808.080302] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52598d3d-aaf5-8361-1952-e6e4635259b4" [ 808.080302] env[62109]: _type = "Task" [ 808.080302] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.088376] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52598d3d-aaf5-8361-1952-e6e4635259b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.186397] env[62109]: DEBUG oslo_concurrency.lockutils [None req-262c34dd-93a6-4dbb-91bc-e5fa19927c9e tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "028300fd-f9f8-4606-a39e-53582f830eeb" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.326s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.300630] env[62109]: DEBUG nova.scheduler.client.report [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 808.471014] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116460, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.479735] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 808.506730] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 808.591735] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52598d3d-aaf5-8361-1952-e6e4635259b4, 'name': SearchDatastore_Task, 'duration_secs': 0.025232} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.591735] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.592076] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8/2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 808.592401] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3ae68d2-361f-48c9-8352-a47c282868d4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.599742] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Waiting for the task: (returnval){ [ 808.599742] env[62109]: value = "task-1116461" [ 808.599742] env[62109]: _type = "Task" [ 808.599742] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.610424] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116461, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.805985] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.517s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.806623] env[62109]: DEBUG nova.compute.manager [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 808.809516] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.029s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.811257] env[62109]: INFO nova.compute.claims [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.920081] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "0f197e98-9630-4928-8707-56bbf6c1e5a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.920081] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "0f197e98-9630-4928-8707-56bbf6c1e5a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.972715] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116460, 'name': CloneVM_Task} progress is 95%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.003037] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.029356] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.111806] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116461, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.317152] env[62109]: DEBUG nova.compute.utils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 809.321069] env[62109]: DEBUG nova.compute.manager [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 809.321268] env[62109]: DEBUG nova.network.neutron [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 809.365134] env[62109]: DEBUG nova.policy [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a65ab55ee66140f2a825e4347258d12d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57a1783401e34096b84023fc70da3840', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 809.473938] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116460, 'name': CloneVM_Task, 'duration_secs': 1.778412} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.474346] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Created linked-clone VM from snapshot [ 809.474990] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c404e9-90b1-4ef1-8ade-2b64f134efed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.482397] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Uploading image cb79f804-aa22-44e6-95cf-7d3baf7cae8b {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 809.506116] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 809.506116] env[62109]: value = "vm-244438" [ 809.506116] env[62109]: _type = "VirtualMachine" [ 809.506116] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 809.506478] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4552ca96-d9cf-4fe4-af3f-5e1915d4d5fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.514324] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lease: (returnval){ [ 809.514324] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527110f8-9d17-e870-5175-59d7a2a03405" [ 809.514324] env[62109]: _type = "HttpNfcLease" [ 809.514324] env[62109]: } obtained for exporting VM: (result){ [ 809.514324] env[62109]: value = "vm-244438" [ 809.514324] env[62109]: _type = "VirtualMachine" [ 809.514324] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 809.514621] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the lease: (returnval){ [ 809.514621] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527110f8-9d17-e870-5175-59d7a2a03405" [ 809.514621] env[62109]: _type = "HttpNfcLease" [ 809.514621] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 809.521865] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 809.521865] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527110f8-9d17-e870-5175-59d7a2a03405" [ 809.521865] env[62109]: _type = "HttpNfcLease" [ 809.521865] env[62109]: } is initializing. 
{{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 809.611083] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116461, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569171} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.611374] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8/2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 809.611597] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 809.611864] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-968f5b9e-5eb5-406b-a409-38a4785d0a3a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.618691] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Waiting for the task: (returnval){ [ 809.618691] env[62109]: value = "task-1116463" [ 809.618691] env[62109]: _type = "Task" [ 809.618691] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.627640] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116463, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.664828] env[62109]: DEBUG nova.network.neutron [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Successfully created port: 982c79dd-f148-4cf1-af9e-f0ba120b13f2 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 809.724053] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 809.724324] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 809.724540] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Deleting the datastore file [datastore1] a9fb75d5-e303-4f31-888d-528963ab23b7 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 809.724907] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c0fb5c2-8924-4b63-9c67-c1e821d4ac36 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.732025] env[62109]: DEBUG oslo_vmware.api [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 809.732025] env[62109]: value = "task-1116464" [ 809.732025] env[62109]: _type = "Task" [ 809.732025] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.741426] env[62109]: DEBUG oslo_vmware.api [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116464, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.824872] env[62109]: DEBUG nova.compute.manager [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 810.023710] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 810.023710] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527110f8-9d17-e870-5175-59d7a2a03405" [ 810.023710] env[62109]: _type = "HttpNfcLease" [ 810.023710] env[62109]: } is ready. 
{{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 810.026460] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 810.026460] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527110f8-9d17-e870-5175-59d7a2a03405" [ 810.026460] env[62109]: _type = "HttpNfcLease" [ 810.026460] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 810.027531] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b24c3e8-c5bb-409a-b2cc-442b532a379d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.035508] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5232184a-a65a-97d7-2e65-1c4012db9c73/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 810.036403] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5232184a-a65a-97d7-2e65-1c4012db9c73/disk-0.vmdk for reading. {{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 810.131276] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116463, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071155} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.131571] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 810.132403] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458c0d79-2fa1-4c83-831b-e64704a15653 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.169087] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8/2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 810.175482] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5e21a73-8258-401d-891b-4f5164fdef56 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.190237] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9ef3c64f-d1b8-4caa-bacf-e12361e30c08 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.199762] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Waiting for the task: (returnval){ [ 810.199762] env[62109]: value = "task-1116465" [ 810.199762] env[62109]: _type = "Task" [ 810.199762] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.211352] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116465, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.241622] env[62109]: DEBUG oslo_vmware.api [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116464, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202451} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.241916] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 810.242125] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 810.242314] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 810.242488] env[62109]: INFO nova.compute.manager [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Took 3.66 seconds to destroy the instance on the hypervisor. [ 810.242732] env[62109]: DEBUG oslo.service.loopingcall [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 810.242933] env[62109]: DEBUG nova.compute.manager [-] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 810.243045] env[62109]: DEBUG nova.network.neutron [-] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 810.381623] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ec007c-8d7b-4e46-8f18-8c022c181645 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.391436] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e74ed9-d1d7-4053-8dd1-a2103bda16e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.428890] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc4ed8b-3814-4169-8974-ed343676f7c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.437355] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504c974f-1242-4b24-882a-f0ae35c3e7ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.451718] env[62109]: DEBUG nova.compute.provider_tree [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 810.568384] env[62109]: DEBUG nova.compute.manager [req-c72fecd2-d45b-46b0-a19d-c7b342ecc4d4 req-874bbcdd-143b-4b7f-9120-4d90bf995ea0 service nova] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Received event network-vif-deleted-86903ba6-b62e-4930-a50b-26da3e8cfb63 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 810.568384] env[62109]: INFO nova.compute.manager [req-c72fecd2-d45b-46b0-a19d-c7b342ecc4d4 req-874bbcdd-143b-4b7f-9120-4d90bf995ea0 service nova] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Neutron deleted interface 86903ba6-b62e-4930-a50b-26da3e8cfb63; detaching it from the instance and deleting it from the info cache [ 810.568705] env[62109]: DEBUG nova.network.neutron [req-c72fecd2-d45b-46b0-a19d-c7b342ecc4d4 req-874bbcdd-143b-4b7f-9120-4d90bf995ea0 service nova] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.711016] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 
tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116465, 'name': ReconfigVM_Task, 'duration_secs': 0.454332} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.711486] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8/2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 810.712241] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-27c904ad-bd90-4f7e-bb11-dbc363040d9d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.719598] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Waiting for the task: (returnval){ [ 810.719598] env[62109]: value = "task-1116466" [ 810.719598] env[62109]: _type = "Task" [ 810.719598] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.728623] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116466, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.843234] env[62109]: DEBUG nova.compute.manager [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 810.975840] env[62109]: ERROR nova.scheduler.client.report [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [req-b5360409-ccf8-4bea-bac9-11d4b21fa29d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b5360409-ccf8-4bea-bac9-11d4b21fa29d"}]} [ 810.994845] env[62109]: DEBUG nova.scheduler.client.report [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 811.014018] env[62109]: DEBUG nova.scheduler.client.report [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 811.014470] env[62109]: DEBUG nova.compute.provider_tree [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 811.036027] env[62109]: DEBUG nova.scheduler.client.report [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 811.045504] env[62109]: DEBUG nova.network.neutron [-] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.063968] env[62109]: DEBUG nova.scheduler.client.report [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 811.072925] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-437cbf28-2a7e-4da1-8b1c-f0d7963d82c3 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.083713] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8663fe-8fe9-4333-be60-8627506f33b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.125042] env[62109]: DEBUG nova.compute.manager [req-c72fecd2-d45b-46b0-a19d-c7b342ecc4d4 req-874bbcdd-143b-4b7f-9120-4d90bf995ea0 service nova] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Detach interface failed, port_id=86903ba6-b62e-4930-a50b-26da3e8cfb63, reason: Instance a9fb75d5-e303-4f31-888d-528963ab23b7 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 811.170884] env[62109]: DEBUG nova.virt.hardware [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 811.171162] env[62109]: DEBUG nova.virt.hardware [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 811.171324] env[62109]: DEBUG nova.virt.hardware [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 811.171502] env[62109]: DEBUG nova.virt.hardware [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 811.171649] env[62109]: DEBUG nova.virt.hardware [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 811.171853] env[62109]: DEBUG nova.virt.hardware [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 811.172147] env[62109]: DEBUG nova.virt.hardware [None 
req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 811.172344] env[62109]: DEBUG nova.virt.hardware [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 811.172540] env[62109]: DEBUG nova.virt.hardware [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 811.173287] env[62109]: DEBUG nova.virt.hardware [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 811.173533] env[62109]: DEBUG nova.virt.hardware [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 811.178250] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43a69af-933e-4e61-9d50-0460d4247269 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.191375] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ec7c10-01ae-32a0-9ed2-7ae7dbec0b8c/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 811.192411] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b63830-def7-40c4-98c0-de0ee9c0e61a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.199074] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51cbc9f4-8661-431f-8d3d-d3544028e566 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.206645] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ec7c10-01ae-32a0-9ed2-7ae7dbec0b8c/disk-0.vmdk is in state: ready. 
{{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 811.206900] env[62109]: ERROR oslo_vmware.rw_handles [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ec7c10-01ae-32a0-9ed2-7ae7dbec0b8c/disk-0.vmdk due to incomplete transfer. [ 811.207765] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0d3d6dad-f292-40fa-aca6-8fcad3ce0797 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.229084] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ec7c10-01ae-32a0-9ed2-7ae7dbec0b8c/disk-0.vmdk. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 811.229347] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Uploaded image 37d29dc5-a20b-4867-affe-5b6316438d64 to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 811.231692] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 811.234916] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2d55b4d8-8ca0-4e47-989c-bac1f635b8e5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.238031] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116466, 'name': Rename_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.244417] env[62109]: DEBUG nova.network.neutron [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Successfully updated port: 982c79dd-f148-4cf1-af9e-f0ba120b13f2 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.246998] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 811.246998] env[62109]: value = "task-1116467" [ 811.246998] env[62109]: _type = "Task" [ 811.246998] env[62109]: } to complete. 
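Editor's note: the entries above show the oslo.vmware task pattern that recurs throughout this log: a vSphere method returns a task handle (e.g. task-1116466 for Rename_Task), the caller blocks in wait_for_task, and _poll_task reports progress (5%, 99%, completed) until the task reaches a terminal state. The following is only a minimal, self-contained sketch of that polling loop against a fake task object; FakeTaskInfo and its poll() method are invented for illustration and are not oslo.vmware API.

    import time

    class FakeTaskInfo:
        """Stand-in for the vSphere TaskInfo object that oslo.vmware polls."""
        def __init__(self, states):
            self._states = iter(states)

        def poll(self):
            return next(self._states)

    def wait_for_task(task, interval=0.5):
        """Poll a task until it reaches a terminal state.

        Mirrors the loop visible in the log: each poll reports progress,
        and the wait returns on 'success' or raises on 'error'.
        """
        while True:
            state, progress = task.poll()
            print(f"progress is {progress}%")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)

    # Simulate the Rename_Task seen as task-1116466: 5% -> 99% -> success.
    wait_for_task(FakeTaskInfo([("running", 5), ("running", 99), ("success", 100)]),
                  interval=0)
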
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.261654] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116467, 'name': Destroy_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.547438] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6de413f-0fe3-4ac3-b9f6-949d2b2f6629 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.551543] env[62109]: INFO nova.compute.manager [-] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Took 1.31 seconds to deallocate network for instance. [ 811.559996] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a841cb4-1fc0-4a6a-aa33-64634f4d8a6b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.594566] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d59d6ca-67d9-4183-8a43-21fcc77a92b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.603398] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7092132-82e7-4a3c-97ed-50de666841b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.618529] env[62109]: DEBUG nova.compute.provider_tree [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 811.730740] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116466, 'name': Rename_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.749131] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "refresh_cache-342b7069-22fb-4934-9ec3-8ecbc987696e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.749368] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquired lock "refresh_cache-342b7069-22fb-4934-9ec3-8ecbc987696e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.749502] env[62109]: DEBUG nova.network.neutron [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 811.760026] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116467, 'name': Destroy_Task} progress is 33%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.064416] env[62109]: DEBUG oslo_concurrency.lockutils [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.158825] env[62109]: DEBUG nova.scheduler.client.report [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 82 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 812.159179] env[62109]: DEBUG nova.compute.provider_tree [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 82 to 83 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 812.159388] env[62109]: DEBUG nova.compute.provider_tree [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 812.192283] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "c5c63ece-611d-45d1-a8e6-9327700f1563" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.192602] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "c5c63ece-611d-45d1-a8e6-9327700f1563" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.192822] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "c5c63ece-611d-45d1-a8e6-9327700f1563-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.193142] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "c5c63ece-611d-45d1-a8e6-9327700f1563-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.193354] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "c5c63ece-611d-45d1-a8e6-9327700f1563-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.195997] env[62109]: INFO nova.compute.manager [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Terminating instance [ 812.199426] env[62109]: DEBUG nova.compute.manager [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Start destroying the instance on the hypervisor. 
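Editor's note: the ERROR/DEBUG sequence for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 above is Placement's optimistic concurrency in action: the inventory PUT carried a stale resource provider generation, Placement answered 409 with code placement.concurrent_update, the report client refreshed inventories, aggregates and traits, retried, and the generation advanced from 82 to 83. Below is a hedged sketch of that generation-refresh-and-retry shape using an in-memory fake; FakePlacement and set_inventory_with_retry are placeholders for illustration, not Nova's report client or the real REST calls.

    class Conflict(Exception):
        """Raised when Placement reports placement.concurrent_update (HTTP 409)."""

    class FakePlacement:
        """Tiny stand-in for the Placement API, just enough to show the retry."""
        def __init__(self):
            self.generation = 82
            self._stale_once = True

        def get_generation(self, rp_uuid):
            # First read is deliberately stale to force one 409, as in the log.
            if self._stale_once:
                self._stale_once = False
                return self.generation - 1
            return self.generation

        def put_inventories(self, rp_uuid, generation, inventories):
            if generation != self.generation:
                raise Conflict("resource provider generation conflict")
            self.generation += 1          # 82 -> 83, mirroring the log
            return self.generation

    def set_inventory_with_retry(placement, rp_uuid, inventories, max_attempts=4):
        """PUT inventories, refreshing the provider generation on each 409."""
        for _attempt in range(max_attempts):
            gen = placement.get_generation(rp_uuid)
            try:
                return placement.put_inventories(rp_uuid, gen, inventories)
            except Conflict:
                continue                  # refresh the generation and retry
        raise RuntimeError("giving up after repeated generation conflicts")

    new_gen = set_inventory_with_retry(
        FakePlacement(), "574e9717-c25e-453d-8028-45d9e2f95398",
        {"VCPU": {"total": 48}, "MEMORY_MB": {"total": 196590},
         "DISK_GB": {"total": 400}})
    print("provider generation is now", new_gen)   # 83
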
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 812.200091] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 812.200555] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8af69f-5a29-4860-afdb-25dc611a12c4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.215238] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 812.215658] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f104711-2c52-45ba-b309-197c42cda877 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.230178] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116466, 'name': Rename_Task, 'duration_secs': 1.214445} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.230480] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 812.230731] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3d20651-aced-4af9-9e24-8260e67af4d9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.237469] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Waiting for the task: (returnval){ [ 812.237469] env[62109]: value = "task-1116469" [ 812.237469] env[62109]: _type = "Task" [ 812.237469] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.246032] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116469, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.261097] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116467, 'name': Destroy_Task, 'duration_secs': 0.721862} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.261885] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Destroyed the VM [ 812.262329] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 812.262394] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0c616db5-5f42-4a6d-ba1f-ff07c1ba6050 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.270320] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 812.270320] env[62109]: value = "task-1116470" [ 812.270320] env[62109]: _type = "Task" [ 812.270320] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.282484] env[62109]: DEBUG oslo_vmware.api [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116470, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.289956] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 812.290232] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 812.290428] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleting the datastore file [datastore2] c5c63ece-611d-45d1-a8e6-9327700f1563 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 812.290730] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b97625d-d09b-40e2-9695-1e8ca152730b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.293539] env[62109]: DEBUG nova.network.neutron [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.301252] env[62109]: DEBUG oslo_vmware.api [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 812.301252] env[62109]: value = "task-1116471" [ 812.301252] env[62109]: _type = "Task" [ 812.301252] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.311185] env[62109]: DEBUG oslo_vmware.api [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116471, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.468369] env[62109]: DEBUG nova.network.neutron [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Updating instance_info_cache with network_info: [{"id": "982c79dd-f148-4cf1-af9e-f0ba120b13f2", "address": "fa:16:3e:ce:64:e3", "network": {"id": "97410b64-2638-4108-b740-0518f3eea13f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-366497996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57a1783401e34096b84023fc70da3840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap982c79dd-f1", "ovs_interfaceid": "982c79dd-f148-4cf1-af9e-f0ba120b13f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.665714] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.856s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.666162] env[62109]: DEBUG nova.compute.manager [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Start building networks asynchronously for instance. 
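Editor's note: most of the coordination visible here ("Acquiring lock ...", "acquired ... waited 28.929s", "released ... held 3.856s") goes through oslo.concurrency's lockutils, used both as a decorator and as a context manager. A minimal sketch of the same pattern follows, assuming the oslo.concurrency package is installed; the lock name "compute_resources" is taken from the log, while the worker function and the instance UUID usage are illustrative only.

    import time

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def instance_claim(instance_uuid):
        # Stand-in for a claim: everything inside the decorated function
        # runs with the in-process "compute_resources" lock held.
        time.sleep(0.1)
        return instance_uuid

    # The same lock can also be taken explicitly as a context manager.
    with lockutils.lock("compute_resources"):
        pass  # e.g. update usage, as ResourceTracker.update_usage does above

    print(instance_claim("af3465db-fd56-458d-a499-14df3a0029f0"))
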
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 812.668933] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "0392a352-74e5-4551-9319-eebbc5e20d3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.669044] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "0392a352-74e5-4551-9319-eebbc5e20d3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.669440] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.929s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.670759] env[62109]: INFO nova.compute.claims [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 812.706052] env[62109]: DEBUG nova.compute.manager [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Received event network-vif-plugged-982c79dd-f148-4cf1-af9e-f0ba120b13f2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 812.706052] env[62109]: DEBUG oslo_concurrency.lockutils [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] Acquiring lock "342b7069-22fb-4934-9ec3-8ecbc987696e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.706052] env[62109]: DEBUG oslo_concurrency.lockutils [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.706052] env[62109]: DEBUG oslo_concurrency.lockutils [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.706052] env[62109]: DEBUG nova.compute.manager [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] No waiting events found dispatching 
network-vif-plugged-982c79dd-f148-4cf1-af9e-f0ba120b13f2 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 812.706227] env[62109]: WARNING nova.compute.manager [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Received unexpected event network-vif-plugged-982c79dd-f148-4cf1-af9e-f0ba120b13f2 for instance with vm_state building and task_state spawning. [ 812.706512] env[62109]: DEBUG nova.compute.manager [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Received event network-changed-982c79dd-f148-4cf1-af9e-f0ba120b13f2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 812.706811] env[62109]: DEBUG nova.compute.manager [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Refreshing instance network info cache due to event network-changed-982c79dd-f148-4cf1-af9e-f0ba120b13f2. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 812.706956] env[62109]: DEBUG oslo_concurrency.lockutils [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] Acquiring lock "refresh_cache-342b7069-22fb-4934-9ec3-8ecbc987696e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.748545] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116469, 'name': PowerOnVM_Task} progress is 100%. 
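Editor's note: the req-4ebe4131 entries show Neutron's network-vif-plugged-982c79dd-... event arriving before the compute manager had registered a waiter for it, hence "No waiting events found dispatching" followed by the WARNING about an unexpected event while the instance is still building/spawning. Conceptually this is a wait-for-external-event rendezvous; the toy below shows only that shape with plain threading primitives and is not Nova's InstanceEvents implementation.

    import threading

    class ToyInstanceEvents:
        """Toy rendezvous between a spawning thread and external notifications."""
        def __init__(self):
            self._events = {}            # event name -> threading.Event
            self._lock = threading.Lock()

        def prepare(self, name):
            with self._lock:
                return self._events.setdefault(name, threading.Event())

        def pop(self, name):
            """Called when a notification arrives; None means nobody is waiting."""
            with self._lock:
                return self._events.pop(name, None)

    events = ToyInstanceEvents()

    # Notification arrives before anyone prepared a waiter -> "unexpected event".
    if events.pop("network-vif-plugged-982c79dd") is None:
        print("WARNING: received unexpected event network-vif-plugged-982c79dd")

    # Normal path: prepare first, then the arrival wakes the waiter.
    waiter = events.prepare("network-vif-plugged-982c79dd")
    events.pop("network-vif-plugged-982c79dd").set()
    print("event received:", waiter.wait(timeout=1))
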
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.781539] env[62109]: DEBUG nova.compute.utils [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Cleaning up image 37d29dc5-a20b-4867-affe-5b6316438d64 {{(pid=62109) delete_image /opt/stack/nova/nova/compute/utils.py:1322}} [ 812.813883] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] In vmwareapi:vmops:_destroy_instance, exception while deleting the VM contents from the disk: oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore2] c5c63ece-611d-45d1-a8e6-9327700f1563 [ 812.813883] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Traceback (most recent call last): [ 812.813883] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1114, in _destroy_instance [ 812.813883] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] ds_util.file_delete(self._session, [ 812.813883] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] File "/opt/stack/nova/nova/virt/vmwareapi/ds_util.py", line 219, in file_delete [ 812.813883] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] session._wait_for_task(file_delete_task) [ 812.813883] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 812.813883] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] return self.wait_for_task(task_ref) [ 812.813883] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 812.813883] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] return evt.wait() [ 812.813883] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 812.814450] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] result = hub.switch() [ 812.814450] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 812.814450] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] return self.greenlet.switch() [ 812.814450] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 812.814450] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] self.f(*self.args, **self.kw) [ 812.814450] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 812.814450] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] raise exceptions.translate_fault(task_info.error) [ 812.814450] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore2] c5c63ece-611d-45d1-a8e6-9327700f1563 [ 812.814450] env[62109]: ERROR nova.virt.vmwareapi.vmops [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] [ 812.814450] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 812.814720] env[62109]: INFO nova.compute.manager [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Took 0.61 seconds to destroy the instance on the hypervisor. [ 812.814720] env[62109]: DEBUG oslo.service.loopingcall [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 812.814828] env[62109]: DEBUG nova.compute.manager [-] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 812.814887] env[62109]: DEBUG nova.network.neutron [-] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 812.971337] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Releasing lock "refresh_cache-342b7069-22fb-4934-9ec3-8ecbc987696e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.971684] env[62109]: DEBUG nova.compute.manager [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Instance network_info: |[{"id": "982c79dd-f148-4cf1-af9e-f0ba120b13f2", "address": "fa:16:3e:ce:64:e3", "network": {"id": "97410b64-2638-4108-b740-0518f3eea13f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-366497996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57a1783401e34096b84023fc70da3840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap982c79dd-f1", "ovs_interfaceid": "982c79dd-f148-4cf1-af9e-f0ba120b13f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 812.972020] env[62109]: DEBUG oslo_concurrency.lockutils [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] Acquired lock "refresh_cache-342b7069-22fb-4934-9ec3-8ecbc987696e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.972219] env[62109]: DEBUG nova.network.neutron [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Refreshing network info cache for port 982c79dd-f148-4cf1-af9e-f0ba120b13f2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 812.973482] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:64:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '982c79dd-f148-4cf1-af9e-f0ba120b13f2', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 812.981815] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Creating folder: Project (57a1783401e34096b84023fc70da3840). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 812.983059] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9fd3a3e4-7c73-4a6a-b19c-3b9ce430c37a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.994880] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Created folder: Project (57a1783401e34096b84023fc70da3840) in parent group-v244329. [ 812.995150] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Creating folder: Instances. Parent ref: group-v244439. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 812.995363] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a55545cf-c6a0-4bda-9a57-2dad3c4f21b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.004327] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Created folder: Instances in parent group-v244439. [ 813.004572] env[62109]: DEBUG oslo.service.loopingcall [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 813.004786] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 813.005076] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66b99564-ca58-416c-b491-90b1589d4b5a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.024054] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 813.024054] env[62109]: value = "task-1116474" [ 813.024054] env[62109]: _type = "Task" [ 813.024054] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.032763] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116474, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.171143] env[62109]: DEBUG nova.compute.utils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 813.172727] env[62109]: DEBUG nova.compute.manager [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 813.172918] env[62109]: DEBUG nova.network.neutron [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 813.222987] env[62109]: DEBUG nova.policy [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e608055854844801b9f7c51d07820917', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ca12aa68e4b4d4d8cf1e3332deb44f4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 813.248570] env[62109]: DEBUG oslo_vmware.api [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116469, 'name': PowerOnVM_Task, 'duration_secs': 0.520214} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.248865] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 813.249795] env[62109]: INFO nova.compute.manager [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Took 9.10 seconds to spawn the instance on the hypervisor. [ 813.250074] env[62109]: DEBUG nova.compute.manager [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 813.250875] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab5e717-3202-4046-8d44-9373356a1af7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.536688] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116474, 'name': CreateVM_Task, 'duration_secs': 0.384354} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.536995] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 813.537817] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.538041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.539302] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 813.539635] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1de5690f-7f17-4785-a6b4-af2bcb4c4117 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.544496] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 813.544496] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d78cb3-78fb-0219-71c1-761bba137ce4" [ 813.544496] env[62109]: _type = "Task" [ 813.544496] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.548850] env[62109]: DEBUG nova.network.neutron [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Successfully created port: 3d487765-cb55-45bd-b4f2-b2cddcf12cfd {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 813.550678] env[62109]: DEBUG nova.network.neutron [-] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.556724] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d78cb3-78fb-0219-71c1-761bba137ce4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.676255] env[62109]: DEBUG nova.compute.manager [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 813.774768] env[62109]: INFO nova.compute.manager [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Took 39.34 seconds to build instance. [ 813.854097] env[62109]: DEBUG nova.network.neutron [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Updated VIF entry in instance network info cache for port 982c79dd-f148-4cf1-af9e-f0ba120b13f2. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 813.854247] env[62109]: DEBUG nova.network.neutron [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Updating instance_info_cache with network_info: [{"id": "982c79dd-f148-4cf1-af9e-f0ba120b13f2", "address": "fa:16:3e:ce:64:e3", "network": {"id": "97410b64-2638-4108-b740-0518f3eea13f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-366497996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57a1783401e34096b84023fc70da3840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap982c79dd-f1", "ovs_interfaceid": "982c79dd-f148-4cf1-af9e-f0ba120b13f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.053821] env[62109]: INFO nova.compute.manager [-] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Took 1.24 seconds to deallocate network for instance. [ 814.060527] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d78cb3-78fb-0219-71c1-761bba137ce4, 'name': SearchDatastore_Task, 'duration_secs': 0.012212} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.063921] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.064197] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 814.064489] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.064688] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.064910] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 814.068147] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-608b8df7-62e9-4934-ac7b-f585b26866e0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.077207] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 814.077413] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 814.078370] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37d40573-329f-4fc1-a39e-4dbf016cc3ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.083733] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 814.083733] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5298e170-b84e-50b6-412a-abba6d92b1da" [ 814.083733] env[62109]: _type = "Task" [ 814.083733] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.096350] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5298e170-b84e-50b6-412a-abba6d92b1da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.163811] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673c0e87-8bbe-45af-aa17-235b8cc7a1ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.172719] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5729f8-2bcb-4b1e-9d63-99c3b87a016b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.210627] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9482d2-f17a-48ea-a819-d8f60ad588a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.220217] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18b8a7a-29af-4f21-92ef-653be1b3ed2f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.236297] env[62109]: DEBUG nova.compute.provider_tree [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.277720] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1387ae38-bd93-488d-a791-ad90fb095eb3 tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Lock "2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.351s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.309022] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.359646] env[62109]: DEBUG oslo_concurrency.lockutils [req-4ebe4131-86d2-4486-82df-ecf8b43c058d req-d761c5f1-b927-4fd0-b087-7ea1c5862bb5 service nova] Releasing lock "refresh_cache-342b7069-22fb-4934-9ec3-8ecbc987696e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.569452] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.596977] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5298e170-b84e-50b6-412a-abba6d92b1da, 'name': SearchDatastore_Task, 'duration_secs': 0.012977} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.597791] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d797f419-6dd1-4be4-bdb3-f760456b8dfa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.603110] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 814.603110] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520d6a35-c682-70a8-ae8d-ce222e534c9b" [ 814.603110] env[62109]: _type = "Task" [ 814.603110] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.610785] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520d6a35-c682-70a8-ae8d-ce222e534c9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.715514] env[62109]: DEBUG nova.compute.manager [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 814.739201] env[62109]: DEBUG nova.scheduler.client.report [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 814.744343] env[62109]: DEBUG nova.virt.hardware [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 814.744589] env[62109]: DEBUG nova.virt.hardware [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 814.744750] env[62109]: DEBUG nova.virt.hardware [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 814.744941] env[62109]: DEBUG nova.virt.hardware [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 814.745107] env[62109]: DEBUG nova.virt.hardware [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 814.745264] env[62109]: DEBUG nova.virt.hardware [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 814.745476] env[62109]: DEBUG 
nova.virt.hardware [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 814.745641] env[62109]: DEBUG nova.virt.hardware [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 814.745817] env[62109]: DEBUG nova.virt.hardware [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 814.745976] env[62109]: DEBUG nova.virt.hardware [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 814.746183] env[62109]: DEBUG nova.virt.hardware [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 814.748154] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e7a761-b464-4b3b-922e-db6b24f68ccb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.752070] env[62109]: DEBUG nova.compute.manager [req-d64d7ff0-88c9-4fe4-a40f-c9baa58e1ba7 req-da89faa8-243b-4bd9-8d64-ae4eb39a84b6 service nova] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Received event network-vif-deleted-b97f6c74-b63d-475c-93d8-e340e00f169c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 814.752285] env[62109]: DEBUG nova.compute.manager [req-d64d7ff0-88c9-4fe4-a40f-c9baa58e1ba7 req-da89faa8-243b-4bd9-8d64-ae4eb39a84b6 service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Received event network-changed-f5f52514-4146-44d3-9e0e-5ee87f782604 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 814.752451] env[62109]: DEBUG nova.compute.manager [req-d64d7ff0-88c9-4fe4-a40f-c9baa58e1ba7 req-da89faa8-243b-4bd9-8d64-ae4eb39a84b6 service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Refreshing instance network info cache due to event network-changed-f5f52514-4146-44d3-9e0e-5ee87f782604. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 814.752660] env[62109]: DEBUG oslo_concurrency.lockutils [req-d64d7ff0-88c9-4fe4-a40f-c9baa58e1ba7 req-da89faa8-243b-4bd9-8d64-ae4eb39a84b6 service nova] Acquiring lock "refresh_cache-2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.753018] env[62109]: DEBUG oslo_concurrency.lockutils [req-d64d7ff0-88c9-4fe4-a40f-c9baa58e1ba7 req-da89faa8-243b-4bd9-8d64-ae4eb39a84b6 service nova] Acquired lock "refresh_cache-2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.753018] env[62109]: DEBUG nova.network.neutron [req-d64d7ff0-88c9-4fe4-a40f-c9baa58e1ba7 req-da89faa8-243b-4bd9-8d64-ae4eb39a84b6 service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Refreshing network info cache for port f5f52514-4146-44d3-9e0e-5ee87f782604 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 814.761991] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af8e16e-8e3b-4898-8827-abc2a067add0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.780377] env[62109]: DEBUG nova.compute.manager [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 815.111520] env[62109]: DEBUG nova.compute.manager [req-a2843b78-aab7-4534-951a-846709f74008 req-be477ea1-40e7-4dba-b2cd-7d5a9e6ffdc0 service nova] [instance: c694c178-3894-4997-8e99-8f4900a64848] Received event network-vif-plugged-3d487765-cb55-45bd-b4f2-b2cddcf12cfd {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 815.111747] env[62109]: DEBUG oslo_concurrency.lockutils [req-a2843b78-aab7-4534-951a-846709f74008 req-be477ea1-40e7-4dba-b2cd-7d5a9e6ffdc0 service nova] Acquiring lock "c694c178-3894-4997-8e99-8f4900a64848-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.112018] env[62109]: DEBUG oslo_concurrency.lockutils [req-a2843b78-aab7-4534-951a-846709f74008 req-be477ea1-40e7-4dba-b2cd-7d5a9e6ffdc0 service nova] Lock "c694c178-3894-4997-8e99-8f4900a64848-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.112168] env[62109]: DEBUG oslo_concurrency.lockutils [req-a2843b78-aab7-4534-951a-846709f74008 req-be477ea1-40e7-4dba-b2cd-7d5a9e6ffdc0 service nova] Lock "c694c178-3894-4997-8e99-8f4900a64848-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.112403] env[62109]: DEBUG nova.compute.manager [req-a2843b78-aab7-4534-951a-846709f74008 req-be477ea1-40e7-4dba-b2cd-7d5a9e6ffdc0 service nova] [instance: c694c178-3894-4997-8e99-8f4900a64848] No waiting events found dispatching 
network-vif-plugged-3d487765-cb55-45bd-b4f2-b2cddcf12cfd {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 815.112533] env[62109]: WARNING nova.compute.manager [req-a2843b78-aab7-4534-951a-846709f74008 req-be477ea1-40e7-4dba-b2cd-7d5a9e6ffdc0 service nova] [instance: c694c178-3894-4997-8e99-8f4900a64848] Received unexpected event network-vif-plugged-3d487765-cb55-45bd-b4f2-b2cddcf12cfd for instance with vm_state building and task_state spawning. [ 815.117813] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520d6a35-c682-70a8-ae8d-ce222e534c9b, 'name': SearchDatastore_Task, 'duration_secs': 0.010632} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.117813] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.117813] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 342b7069-22fb-4934-9ec3-8ecbc987696e/342b7069-22fb-4934-9ec3-8ecbc987696e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 815.117813] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b87a7064-a5d3-4994-a479-548264576952 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.125117] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 815.125117] env[62109]: value = "task-1116475" [ 815.125117] env[62109]: _type = "Task" [ 815.125117] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.132574] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116475, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.256114] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.256756] env[62109]: DEBUG nova.compute.manager [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 815.262179] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 31.380s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.302903] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.523540] env[62109]: DEBUG nova.network.neutron [req-d64d7ff0-88c9-4fe4-a40f-c9baa58e1ba7 req-da89faa8-243b-4bd9-8d64-ae4eb39a84b6 service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Updated VIF entry in instance network info cache for port f5f52514-4146-44d3-9e0e-5ee87f782604. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 815.523931] env[62109]: DEBUG nova.network.neutron [req-d64d7ff0-88c9-4fe4-a40f-c9baa58e1ba7 req-da89faa8-243b-4bd9-8d64-ae4eb39a84b6 service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Updating instance_info_cache with network_info: [{"id": "f5f52514-4146-44d3-9e0e-5ee87f782604", "address": "fa:16:3e:b3:a9:5a", "network": {"id": "6370db82-9650-49a7-8ebf-5c3d3e0c00df", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1066537298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b70ad018a9443888f3bd51b32782554", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5f52514-41", "ovs_interfaceid": "f5f52514-4146-44d3-9e0e-5ee87f782604", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.636657] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116475, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500369} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.637010] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 342b7069-22fb-4934-9ec3-8ecbc987696e/342b7069-22fb-4934-9ec3-8ecbc987696e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 815.637248] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 815.637584] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e631b106-0b73-4ecd-97a9-06964b8ce028 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.644982] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 815.644982] env[62109]: value = "task-1116476" [ 815.644982] env[62109]: _type = "Task" [ 815.644982] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.654161] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116476, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.678780] env[62109]: DEBUG nova.network.neutron [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Successfully updated port: 3d487765-cb55-45bd-b4f2-b2cddcf12cfd {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 815.775130] env[62109]: DEBUG nova.compute.utils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 815.776641] env[62109]: DEBUG nova.compute.manager [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 815.776907] env[62109]: DEBUG nova.network.neutron [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 815.817042] env[62109]: DEBUG nova.policy [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c49484657292430db0569311e6a5cc46', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45ad1c039aa9463e977cf986ce4dccf4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 816.026881] env[62109]: DEBUG oslo_concurrency.lockutils [req-d64d7ff0-88c9-4fe4-a40f-c9baa58e1ba7 req-da89faa8-243b-4bd9-8d64-ae4eb39a84b6 service nova] Releasing lock "refresh_cache-2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.072570] env[62109]: DEBUG nova.network.neutron [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Successfully created port: a33eb191-a49c-4a63-8f1a-569b4fcbc346 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 816.156080] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116476, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073989} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.156376] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 816.157262] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73540da4-296a-4526-9bc8-11fba5312449 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.181554] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 342b7069-22fb-4934-9ec3-8ecbc987696e/342b7069-22fb-4934-9ec3-8ecbc987696e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 816.182872] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "refresh_cache-c694c178-3894-4997-8e99-8f4900a64848" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.182872] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "refresh_cache-c694c178-3894-4997-8e99-8f4900a64848" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.182872] env[62109]: DEBUG nova.network.neutron [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 816.183990] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe3250f7-36c8-4000-ac9d-fa62c8e79c58 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.206646] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 816.206646] env[62109]: value = "task-1116477" [ 816.206646] env[62109]: _type = "Task" [ 816.206646] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.215961] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116477, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.280112] env[62109]: DEBUG nova.compute.manager [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 816.717594] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116477, 'name': ReconfigVM_Task, 'duration_secs': 0.429009} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.717908] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 342b7069-22fb-4934-9ec3-8ecbc987696e/342b7069-22fb-4934-9ec3-8ecbc987696e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 816.718708] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ff9948e-f239-4040-adb0-56505fd493e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.726144] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 816.726144] env[62109]: value = "task-1116478" [ 816.726144] env[62109]: _type = "Task" [ 816.726144] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.734800] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116478, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.735692] env[62109]: DEBUG nova.network.neutron [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 816.785327] env[62109]: INFO nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating resource usage from migration a7f68b5f-2580-46ec-a485-50b41584f55e [ 816.813326] env[62109]: DEBUG nova.compute.manager [req-ce3fef3e-77b0-48d6-bc66-95bb5e361107 req-22943ef0-4688-473d-8d0e-86bbed5474c6 service nova] [instance: c694c178-3894-4997-8e99-8f4900a64848] Received event network-changed-3d487765-cb55-45bd-b4f2-b2cddcf12cfd {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 816.813326] env[62109]: DEBUG nova.compute.manager [req-ce3fef3e-77b0-48d6-bc66-95bb5e361107 req-22943ef0-4688-473d-8d0e-86bbed5474c6 service nova] [instance: c694c178-3894-4997-8e99-8f4900a64848] Refreshing instance network info cache due to event network-changed-3d487765-cb55-45bd-b4f2-b2cddcf12cfd. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 816.813326] env[62109]: DEBUG oslo_concurrency.lockutils [req-ce3fef3e-77b0-48d6-bc66-95bb5e361107 req-22943ef0-4688-473d-8d0e-86bbed5474c6 service nova] Acquiring lock "refresh_cache-c694c178-3894-4997-8e99-8f4900a64848" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.813725] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.813857] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance c753a2db-d701-4508-88bd-4ebe4f32a075 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.814056] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 1399f618-3a93-4731-a59b-f98306d6cd52 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.814207] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 55381bef-dab5-44cd-97fe-9fc75ab61d0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.814346] env[62109]: WARNING nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance a9fb75d5-e303-4f31-888d-528963ab23b7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
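The resource tracker records above and below come from Nova's periodic update of available resources, where allocations recorded in the placement service are cross-checked against the instances (and in-progress migrations) this compute host believes it manages; allocations whose consumer is unknown locally are only warned about and skipped rather than healed. The following is a minimal illustrative sketch of that classification under simplified assumptions — classify_allocations, allocations, tracked_instances and in_progress_migrations are hypothetical stand-ins for this sketch, not Nova's actual method names or data structures.

# Illustrative sketch only: approximates the classification suggested by the
# resource tracker log messages; inputs are simplified assumptions.
def classify_allocations(allocations, tracked_instances, in_progress_migrations):
    """Classify placement allocations held against this compute node.

    allocations: dict mapping consumer UUID -> {'resources': {...}}
    tracked_instances: set of instance UUIDs actively managed on this host
    in_progress_migrations: set of migration UUIDs that may hold allocations
    """
    for consumer_uuid, alloc in allocations.items():
        resources = alloc.get('resources', {})
        if consumer_uuid in tracked_instances:
            print(f"Instance {consumer_uuid} actively managed on this compute host "
                  f"and has allocations in placement: {resources}.")
        elif consumer_uuid in in_progress_migrations:
            print(f"Migration {consumer_uuid} is active on this compute host "
                  f"and has allocations in placement: {resources}.")
        else:
            # Mirrors the WARNING records in the log: the allocation references this
            # host but the consumer is unknown here, so nothing is healed automatically.
            print(f"Instance {consumer_uuid} is not being actively managed by this "
                  f"compute host but has allocations referencing this compute host: "
                  f"{resources}. Skipping heal of allocation.")

# Example input shaped like the allocations shown in the log (values illustrative):
classify_allocations(
    {'342b7069-22fb-4934-9ec3-8ecbc987696e':
         {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}},
    tracked_instances={'342b7069-22fb-4934-9ec3-8ecbc987696e'},
    in_progress_migrations=set(),
)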
[ 816.814456] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 028300fd-f9f8-4606-a39e-53582f830eeb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.814575] env[62109]: WARNING nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 8b6ec904-8c68-4eaa-94fe-47a87528e26b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 816.814713] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.814866] env[62109]: WARNING nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 7f40cdc8-3421-47b7-b148-ff6417105dbb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 816.814986] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 8b63f9a1-5639-48b2-b0a9-30380835bef2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.815119] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance a24f2349-7c1b-441d-a36e-b16dd61f6031 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.815298] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.815513] env[62109]: WARNING nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance c5c63ece-611d-45d1-a8e6-9327700f1563 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 816.817509] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 9b2968bb-ed06-4740-b43e-b4aa1fac76dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.817509] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 59f6adc7-d491-4a86-83f7-89128511e00f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.817509] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 7afbb35b-9865-40a7-8b37-d6a661a186a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.817509] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.817749] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 342b7069-22fb-4934-9ec3-8ecbc987696e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.817749] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance c694c178-3894-4997-8e99-8f4900a64848 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.817749] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance af3465db-fd56-458d-a499-14df3a0029f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 816.899421] env[62109]: DEBUG nova.network.neutron [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Updating instance_info_cache with network_info: [{"id": "3d487765-cb55-45bd-b4f2-b2cddcf12cfd", "address": "fa:16:3e:68:1a:10", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d487765-cb", "ovs_interfaceid": "3d487765-cb55-45bd-b4f2-b2cddcf12cfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.235763] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116478, 'name': Rename_Task, 'duration_secs': 0.224109} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.236193] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 817.236510] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b27a7c74-d540-478a-b562-62d45f97b912 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.243083] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 817.243083] env[62109]: value = "task-1116479" [ 817.243083] env[62109]: _type = "Task" [ 817.243083] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.251286] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116479, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.293790] env[62109]: DEBUG nova.compute.manager [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 817.319790] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 5d656f91-d35f-45e1-8892-7cdacd306960 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 817.319790] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Migration a7f68b5f-2580-46ec-a485-50b41584f55e is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 817.319790] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 32cccd30-278c-48b6-8855-5cd76c2da057 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 817.323105] env[62109]: DEBUG nova.virt.hardware [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 817.323538] env[62109]: DEBUG nova.virt.hardware [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 817.323538] env[62109]: DEBUG nova.virt.hardware [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.323670] env[62109]: DEBUG nova.virt.hardware [None 
req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 817.323829] env[62109]: DEBUG nova.virt.hardware [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.323983] env[62109]: DEBUG nova.virt.hardware [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 817.324216] env[62109]: DEBUG nova.virt.hardware [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 817.324384] env[62109]: DEBUG nova.virt.hardware [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 817.324558] env[62109]: DEBUG nova.virt.hardware [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 817.324734] env[62109]: DEBUG nova.virt.hardware [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 817.324952] env[62109]: DEBUG nova.virt.hardware [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 817.325970] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03afd51d-f941-40be-a080-cd6156ef5729 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.335598] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b65340-1496-4fd1-8088-5779e962a8d4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.403664] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "refresh_cache-c694c178-3894-4997-8e99-8f4900a64848" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.404027] env[62109]: DEBUG nova.compute.manager [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Instance network_info: |[{"id": "3d487765-cb55-45bd-b4f2-b2cddcf12cfd", "address": "fa:16:3e:68:1a:10", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d487765-cb", "ovs_interfaceid": "3d487765-cb55-45bd-b4f2-b2cddcf12cfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 817.404366] env[62109]: DEBUG oslo_concurrency.lockutils [req-ce3fef3e-77b0-48d6-bc66-95bb5e361107 req-22943ef0-4688-473d-8d0e-86bbed5474c6 service nova] Acquired lock "refresh_cache-c694c178-3894-4997-8e99-8f4900a64848" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.404559] env[62109]: DEBUG nova.network.neutron [req-ce3fef3e-77b0-48d6-bc66-95bb5e361107 req-22943ef0-4688-473d-8d0e-86bbed5474c6 service nova] [instance: c694c178-3894-4997-8e99-8f4900a64848] Refreshing network info cache for port 3d487765-cb55-45bd-b4f2-b2cddcf12cfd {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 817.405829] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:1a:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6e940e5-e083-4238-973e-f1b4e2a3a5c7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d487765-cb55-45bd-b4f2-b2cddcf12cfd', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 817.413841] env[62109]: DEBUG oslo.service.loopingcall [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 817.415139] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c694c178-3894-4997-8e99-8f4900a64848] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 817.415385] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7b4bf34-9f91-4960-a656-5092a1b29aa0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.436744] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 817.436744] env[62109]: value = "task-1116480" [ 817.436744] env[62109]: _type = "Task" [ 817.436744] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.445077] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116480, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.566628] env[62109]: DEBUG nova.network.neutron [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Successfully updated port: a33eb191-a49c-4a63-8f1a-569b4fcbc346 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 817.753720] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116479, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.830041] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 7ace6356-1a81-4095-8286-c9b6d829062b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 817.948243] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116480, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.073190] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "refresh_cache-af3465db-fd56-458d-a499-14df3a0029f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.073462] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquired lock "refresh_cache-af3465db-fd56-458d-a499-14df3a0029f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.073552] env[62109]: DEBUG nova.network.neutron [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 818.124132] env[62109]: DEBUG nova.network.neutron [req-ce3fef3e-77b0-48d6-bc66-95bb5e361107 req-22943ef0-4688-473d-8d0e-86bbed5474c6 service nova] [instance: c694c178-3894-4997-8e99-8f4900a64848] Updated VIF entry in instance network info cache for port 3d487765-cb55-45bd-b4f2-b2cddcf12cfd. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 818.124710] env[62109]: DEBUG nova.network.neutron [req-ce3fef3e-77b0-48d6-bc66-95bb5e361107 req-22943ef0-4688-473d-8d0e-86bbed5474c6 service nova] [instance: c694c178-3894-4997-8e99-8f4900a64848] Updating instance_info_cache with network_info: [{"id": "3d487765-cb55-45bd-b4f2-b2cddcf12cfd", "address": "fa:16:3e:68:1a:10", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d487765-cb", "ovs_interfaceid": "3d487765-cb55-45bd-b4f2-b2cddcf12cfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.210016] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5232184a-a65a-97d7-2e65-1c4012db9c73/disk-0.vmdk. 
{{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 818.211079] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89d2fe7-9c3d-4a1a-91e1-708bc224de68 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.217123] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5232184a-a65a-97d7-2e65-1c4012db9c73/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 818.217298] env[62109]: ERROR oslo_vmware.rw_handles [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5232184a-a65a-97d7-2e65-1c4012db9c73/disk-0.vmdk due to incomplete transfer. [ 818.217511] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-79e5e114-100b-4658-8c07-49b06ab181be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.223848] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5232184a-a65a-97d7-2e65-1c4012db9c73/disk-0.vmdk. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 818.224059] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Uploaded image cb79f804-aa22-44e6-95cf-7d3baf7cae8b to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 818.225725] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 818.225953] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c580c6f8-272f-4bde-9c84-d0fc8c86c641 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.232315] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 818.232315] env[62109]: value = "task-1116481" [ 818.232315] env[62109]: _type = "Task" [ 818.232315] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.240413] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116481, 'name': Destroy_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.251960] env[62109]: DEBUG oslo_vmware.api [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116479, 'name': PowerOnVM_Task, 'duration_secs': 0.687659} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.252265] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 818.252487] env[62109]: INFO nova.compute.manager [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Took 7.41 seconds to spawn the instance on the hypervisor. [ 818.252695] env[62109]: DEBUG nova.compute.manager [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 818.253454] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252fa5de-31e9-4d2a-83bd-1509fcf79645 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.333937] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.447102] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116480, 'name': CreateVM_Task, 'duration_secs': 0.537185} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.447311] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c694c178-3894-4997-8e99-8f4900a64848] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 818.447982] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.448173] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.448498] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 818.448753] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df1f5094-5219-4b95-b2cd-5f16d590db3e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.454107] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 818.454107] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a92080-1e2d-ccf0-a564-87fbb0a53051" [ 818.454107] env[62109]: _type = "Task" [ 818.454107] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.462063] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a92080-1e2d-ccf0-a564-87fbb0a53051, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.603174] env[62109]: DEBUG nova.network.neutron [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 818.627486] env[62109]: DEBUG oslo_concurrency.lockutils [req-ce3fef3e-77b0-48d6-bc66-95bb5e361107 req-22943ef0-4688-473d-8d0e-86bbed5474c6 service nova] Releasing lock "refresh_cache-c694c178-3894-4997-8e99-8f4900a64848" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.721847] env[62109]: DEBUG nova.network.neutron [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Updating instance_info_cache with network_info: [{"id": "a33eb191-a49c-4a63-8f1a-569b4fcbc346", "address": "fa:16:3e:6c:a1:54", "network": {"id": "41fbc2a0-d530-4b40-ba33-39d1ea6d6046", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1834365577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "45ad1c039aa9463e977cf986ce4dccf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa33eb191-a4", "ovs_interfaceid": "a33eb191-a49c-4a63-8f1a-569b4fcbc346", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.744175] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116481, 'name': Destroy_Task, 'duration_secs': 0.330824} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.744488] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Destroyed the VM [ 818.744741] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 818.744983] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e24e09cd-f7b1-40fc-a637-822292f047fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.756200] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 818.756200] env[62109]: value = "task-1116482" [ 818.756200] env[62109]: _type = "Task" [ 818.756200] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.769108] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116482, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.773084] env[62109]: INFO nova.compute.manager [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Took 40.18 seconds to build instance. [ 818.837174] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.841206] env[62109]: DEBUG nova.compute.manager [req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Received event network-vif-plugged-a33eb191-a49c-4a63-8f1a-569b4fcbc346 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 818.842413] env[62109]: DEBUG oslo_concurrency.lockutils [req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] Acquiring lock "af3465db-fd56-458d-a499-14df3a0029f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.842413] env[62109]: DEBUG oslo_concurrency.lockutils [req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] Lock "af3465db-fd56-458d-a499-14df3a0029f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.842413] env[62109]: DEBUG oslo_concurrency.lockutils [req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] Lock "af3465db-fd56-458d-a499-14df3a0029f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.842413] env[62109]: DEBUG nova.compute.manager [req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] [instance: af3465db-fd56-458d-a499-14df3a0029f0] No waiting events found dispatching network-vif-plugged-a33eb191-a49c-4a63-8f1a-569b4fcbc346 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 818.842413] env[62109]: WARNING nova.compute.manager [req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Received unexpected event network-vif-plugged-a33eb191-a49c-4a63-8f1a-569b4fcbc346 for instance with vm_state building and task_state spawning. [ 818.842631] env[62109]: DEBUG nova.compute.manager [req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Received event network-changed-a33eb191-a49c-4a63-8f1a-569b4fcbc346 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 818.842631] env[62109]: DEBUG nova.compute.manager [req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Refreshing instance network info cache due to event network-changed-a33eb191-a49c-4a63-8f1a-569b4fcbc346. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 818.842631] env[62109]: DEBUG oslo_concurrency.lockutils [req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] Acquiring lock "refresh_cache-af3465db-fd56-458d-a499-14df3a0029f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.964816] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a92080-1e2d-ccf0-a564-87fbb0a53051, 'name': SearchDatastore_Task, 'duration_secs': 0.011136} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.965294] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.965416] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 818.965659] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.965809] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.966017] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 818.966283] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00ab790c-0104-4db7-8261-f888ea8dac5c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.974699] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
818.974932] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 818.975699] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11b4ed25-bcad-4781-8897-7aa17d5d997a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.984358] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 818.984358] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52620feb-4cc6-d80e-0de3-433909a34eaf" [ 818.984358] env[62109]: _type = "Task" [ 818.984358] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.989225] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52620feb-4cc6-d80e-0de3-433909a34eaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.224254] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Releasing lock "refresh_cache-af3465db-fd56-458d-a499-14df3a0029f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.224617] env[62109]: DEBUG nova.compute.manager [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Instance network_info: |[{"id": "a33eb191-a49c-4a63-8f1a-569b4fcbc346", "address": "fa:16:3e:6c:a1:54", "network": {"id": "41fbc2a0-d530-4b40-ba33-39d1ea6d6046", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1834365577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "45ad1c039aa9463e977cf986ce4dccf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa33eb191-a4", "ovs_interfaceid": "a33eb191-a49c-4a63-8f1a-569b4fcbc346", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 819.224973] env[62109]: DEBUG oslo_concurrency.lockutils 
[req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] Acquired lock "refresh_cache-af3465db-fd56-458d-a499-14df3a0029f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.225177] env[62109]: DEBUG nova.network.neutron [req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Refreshing network info cache for port a33eb191-a49c-4a63-8f1a-569b4fcbc346 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 819.226625] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:a1:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1ce8361b-fd8e-4971-a37f-b84a4f77db19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a33eb191-a49c-4a63-8f1a-569b4fcbc346', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 819.234624] env[62109]: DEBUG oslo.service.loopingcall [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 819.237105] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 819.237596] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2520d6a1-aade-4176-8b5a-ed46345df18a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.258104] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 819.258104] env[62109]: value = "task-1116483" [ 819.258104] env[62109]: _type = "Task" [ 819.258104] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.268735] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116482, 'name': RemoveSnapshot_Task, 'duration_secs': 0.380425} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.271817] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 819.271949] env[62109]: DEBUG nova.compute.manager [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 819.272177] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116483, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.272843] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d323c545-1994-4247-a232-5b3ccb6a2b31 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.275451] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0f56957a-d4a3-4c9d-acec-9a46da8c6585 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.198s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.340409] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance b1321874-8f97-4444-9f9c-d586d51a9e92 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.503247] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52620feb-4cc6-d80e-0de3-433909a34eaf, 'name': SearchDatastore_Task, 'duration_secs': 0.010578} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.503247] env[62109]: DEBUG nova.network.neutron [req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Updated VIF entry in instance network info cache for port a33eb191-a49c-4a63-8f1a-569b4fcbc346. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 819.503335] env[62109]: DEBUG nova.network.neutron [req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Updating instance_info_cache with network_info: [{"id": "a33eb191-a49c-4a63-8f1a-569b4fcbc346", "address": "fa:16:3e:6c:a1:54", "network": {"id": "41fbc2a0-d530-4b40-ba33-39d1ea6d6046", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1834365577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "45ad1c039aa9463e977cf986ce4dccf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa33eb191-a4", "ovs_interfaceid": "a33eb191-a49c-4a63-8f1a-569b4fcbc346", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.503335] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae1a29e4-ed57-4b6c-9331-505873973d78 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.511365] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 819.511365] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52efd78c-61b4-6c36-2586-e0d95f58107f" [ 819.511365] env[62109]: _type = "Task" [ 819.511365] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.524248] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52efd78c-61b4-6c36-2586-e0d95f58107f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.772025] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116483, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.777669] env[62109]: DEBUG nova.compute.manager [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 819.787639] env[62109]: INFO nova.compute.manager [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Shelve offloading [ 819.791666] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 819.791666] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ee6abc6-eaaa-43ea-b866-ce106e1f5c70 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.799841] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 819.799841] env[62109]: value = "task-1116484" [ 819.799841] env[62109]: _type = "Task" [ 819.799841] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.810198] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 819.810534] env[62109]: DEBUG nova.compute.manager [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 819.811497] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9d53a5-733a-40ef-b23c-2effc1cb3f7e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.818828] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "refresh_cache-59f6adc7-d491-4a86-83f7-89128511e00f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.819182] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "refresh_cache-59f6adc7-d491-4a86-83f7-89128511e00f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.819449] env[62109]: DEBUG nova.network.neutron [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 819.846425] env[62109]: DEBUG 
nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance d7d1029c-9b7c-4bd7-b606-a1962a129461 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 820.002143] env[62109]: DEBUG oslo_concurrency.lockutils [req-8e208c9e-ba5c-4b2b-a2b6-5af7f466cbfc req-2337e6e2-91e5-470e-bd5c-0a303a5fe7cf service nova] Releasing lock "refresh_cache-af3465db-fd56-458d-a499-14df3a0029f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.033323] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52efd78c-61b4-6c36-2586-e0d95f58107f, 'name': SearchDatastore_Task, 'duration_secs': 0.010268} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.033623] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.033887] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] c694c178-3894-4997-8e99-8f4900a64848/c694c178-3894-4997-8e99-8f4900a64848.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 820.034176] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7f4ea00-2792-4acc-92b7-b7d763ad35bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.041802] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 820.041802] env[62109]: value = "task-1116485" [ 820.041802] env[62109]: _type = "Task" [ 820.041802] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.050695] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116485, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.277189] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116483, 'name': CreateVM_Task, 'duration_secs': 0.592075} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.277189] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 820.277189] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.277189] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.277189] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 820.277189] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f224131a-e2c8-43ef-9d7c-f376fd3e9f07 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.286686] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 820.286686] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523a3d04-9dad-0ae0-3cae-a6a3e0062439" [ 820.286686] env[62109]: _type = "Task" [ 820.286686] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.301597] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523a3d04-9dad-0ae0-3cae-a6a3e0062439, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.316197] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.351860] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 0f197e98-9630-4928-8707-56bbf6c1e5a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 820.560467] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116485, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464724} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.560815] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] c694c178-3894-4997-8e99-8f4900a64848/c694c178-3894-4997-8e99-8f4900a64848.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 820.561121] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 820.561355] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a9b31cd-0119-45ba-8054-3e7e2e8df797 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.571742] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 820.571742] env[62109]: value = "task-1116486" [ 820.571742] env[62109]: _type = "Task" [ 820.571742] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.583321] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116486, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.590125] env[62109]: DEBUG nova.compute.manager [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 820.590622] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0eb142a-e5b6-4722-ba81-05938dde286d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.599179] env[62109]: DEBUG nova.network.neutron [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Updating instance_info_cache with network_info: [{"id": "e13a10c2-836a-412a-b1af-974b816d3971", "address": "fa:16:3e:09:0a:3a", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a10c2-83", "ovs_interfaceid": "e13a10c2-836a-412a-b1af-974b816d3971", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.797328] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523a3d04-9dad-0ae0-3cae-a6a3e0062439, 'name': SearchDatastore_Task, 'duration_secs': 0.017052} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.797909] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.797909] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 820.798186] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.798375] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.798516] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 820.798774] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98cdb8a0-5e3d-4c20-9f58-13cf028abce9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.806706] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 820.806915] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 820.807632] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b81ca7b-ccf9-45e5-ad2e-2835e2c15f38 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.812781] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 820.812781] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52087b99-e88e-4e33-2b7b-74baacfc5640" [ 820.812781] env[62109]: _type = "Task" [ 820.812781] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.821029] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52087b99-e88e-4e33-2b7b-74baacfc5640, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.854909] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 0392a352-74e5-4551-9319-eebbc5e20d3b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 820.855271] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 820.855396] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3776MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 820.880173] env[62109]: DEBUG nova.compute.manager [req-f6048b82-a560-4460-a94e-48a3a5bcf882 req-6aa58a0d-47b8-4701-b38a-ea54d5a4359f service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Received event network-changed-982c79dd-f148-4cf1-af9e-f0ba120b13f2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 820.880466] env[62109]: DEBUG nova.compute.manager [req-f6048b82-a560-4460-a94e-48a3a5bcf882 req-6aa58a0d-47b8-4701-b38a-ea54d5a4359f service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Refreshing instance network info cache due to event network-changed-982c79dd-f148-4cf1-af9e-f0ba120b13f2. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 820.880790] env[62109]: DEBUG oslo_concurrency.lockutils [req-f6048b82-a560-4460-a94e-48a3a5bcf882 req-6aa58a0d-47b8-4701-b38a-ea54d5a4359f service nova] Acquiring lock "refresh_cache-342b7069-22fb-4934-9ec3-8ecbc987696e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.881020] env[62109]: DEBUG oslo_concurrency.lockutils [req-f6048b82-a560-4460-a94e-48a3a5bcf882 req-6aa58a0d-47b8-4701-b38a-ea54d5a4359f service nova] Acquired lock "refresh_cache-342b7069-22fb-4934-9ec3-8ecbc987696e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.881260] env[62109]: DEBUG nova.network.neutron [req-f6048b82-a560-4460-a94e-48a3a5bcf882 req-6aa58a0d-47b8-4701-b38a-ea54d5a4359f service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Refreshing network info cache for port 982c79dd-f148-4cf1-af9e-f0ba120b13f2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 820.905482] env[62109]: INFO nova.compute.manager [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Rebuilding instance [ 820.951703] env[62109]: DEBUG nova.compute.manager [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 820.952649] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc891e7-290e-456e-9316-d40aa2d72e2a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.084104] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116486, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079349} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.084196] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 821.085155] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d860fdc7-85ed-47ba-a619-36aaa39a4471 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.109694] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] c694c178-3894-4997-8e99-8f4900a64848/c694c178-3894-4997-8e99-8f4900a64848.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 821.114005] env[62109]: INFO nova.compute.manager [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] instance snapshotting [ 821.114910] env[62109]: DEBUG nova.objects.instance [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'flavor' on Instance uuid 8b63f9a1-5639-48b2-b0a9-30380835bef2 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 821.116666] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "refresh_cache-59f6adc7-d491-4a86-83f7-89128511e00f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.118404] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81866dbe-515e-415e-acb1-23c8c42d8b2a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.136678] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761655b1-fb97-4e33-a411-9d64eacd7418 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.141027] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 821.141027] env[62109]: value = "task-1116487" [ 821.141027] env[62109]: _type = "Task" [ 821.141027] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.163202] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52cbded-fd3e-4b0a-b35a-1d19110c3e6e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.169180] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116487, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.302886] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e377bd0-53c8-4b76-9fd9-6d7cfb1e95a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.311035] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a553d551-aade-4d89-8487-1e6e02da45f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.322626] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52087b99-e88e-4e33-2b7b-74baacfc5640, 'name': SearchDatastore_Task, 'duration_secs': 0.009467} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.347900] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca932881-2a10-4672-bd0f-2e3a6948489b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.351145] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b74507-304d-4a89-a519-d5d3e300b912 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.356683] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 821.356683] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522f3fe6-0216-02b0-4f2f-830fa5bc38de" [ 821.356683] env[62109]: _type = "Task" [ 821.356683] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.363145] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d822723-43e8-4f9e-8ba7-02f8d86e93db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.372349] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522f3fe6-0216-02b0-4f2f-830fa5bc38de, 'name': SearchDatastore_Task, 'duration_secs': 0.009202} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.379881] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.380167] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] af3465db-fd56-458d-a499-14df3a0029f0/af3465db-fd56-458d-a499-14df3a0029f0.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 821.380598] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.381671] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2dc2e095-3817-4c27-a30e-592486ea1816 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.390113] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 821.390113] env[62109]: value = "task-1116488" [ 821.390113] env[62109]: _type = "Task" [ 821.390113] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.400640] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116488, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.450129] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 821.450323] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b33f5eb-76e9-4172-9e72-c405643b4ab0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.458448] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 821.458775] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2d65f34-43e2-497a-9764-f034c37c160f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.466234] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 821.466234] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2b0604b-cc96-4c50-b724-1e4d5bdb678d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.471149] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Waiting for the task: (returnval){ [ 821.471149] env[62109]: value = "task-1116490" [ 821.471149] env[62109]: _type = "Task" [ 821.471149] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.479430] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116490, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.545347] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 821.545645] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 821.545854] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleting the datastore file [datastore2] 59f6adc7-d491-4a86-83f7-89128511e00f {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 821.546139] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e820c16c-957a-4476-a87f-a5696ce38360 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.554367] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 821.554367] env[62109]: value = "task-1116491" [ 821.554367] env[62109]: _type = "Task" [ 821.554367] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.563835] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116491, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.653058] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116487, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.681192] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 821.681721] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-34f94a09-5444-438d-977d-79f8969bdd56 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.684753] env[62109]: DEBUG nova.network.neutron [req-f6048b82-a560-4460-a94e-48a3a5bcf882 req-6aa58a0d-47b8-4701-b38a-ea54d5a4359f service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Updated VIF entry in instance network info cache for port 982c79dd-f148-4cf1-af9e-f0ba120b13f2. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 821.685172] env[62109]: DEBUG nova.network.neutron [req-f6048b82-a560-4460-a94e-48a3a5bcf882 req-6aa58a0d-47b8-4701-b38a-ea54d5a4359f service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Updating instance_info_cache with network_info: [{"id": "982c79dd-f148-4cf1-af9e-f0ba120b13f2", "address": "fa:16:3e:ce:64:e3", "network": {"id": "97410b64-2638-4108-b740-0518f3eea13f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-366497996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57a1783401e34096b84023fc70da3840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap982c79dd-f1", "ovs_interfaceid": "982c79dd-f148-4cf1-af9e-f0ba120b13f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.692038] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 821.692038] env[62109]: value = "task-1116492" [ 821.692038] env[62109]: _type = "Task" [ 821.692038] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.701879] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116492, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.884982] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 821.912053] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116488, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507586} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.912494] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] af3465db-fd56-458d-a499-14df3a0029f0/af3465db-fd56-458d-a499-14df3a0029f0.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 821.912605] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 821.912788] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-368115c6-a50a-4c14-9167-fe55c5e45406 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.919574] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 821.919574] env[62109]: value = "task-1116493" [ 821.919574] env[62109]: _type = "Task" [ 821.919574] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.928392] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116493, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.981162] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116490, 'name': PowerOffVM_Task, 'duration_secs': 0.360717} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.981447] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 821.982179] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 821.982446] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0204ad8f-ce39-469a-a03c-512300579990 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.990533] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Waiting for the task: (returnval){ [ 821.990533] env[62109]: value = "task-1116494" [ 821.990533] env[62109]: _type = "Task" [ 821.990533] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.998739] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116494, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.065010] env[62109]: DEBUG oslo_vmware.api [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116491, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.458249} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.065294] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 822.065485] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 822.065668] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 822.087408] env[62109]: INFO nova.scheduler.client.report [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleted allocations for instance 59f6adc7-d491-4a86-83f7-89128511e00f [ 822.155366] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116487, 'name': ReconfigVM_Task, 'duration_secs': 0.64847} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.155760] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Reconfigured VM instance instance-0000003c to attach disk [datastore1] c694c178-3894-4997-8e99-8f4900a64848/c694c178-3894-4997-8e99-8f4900a64848.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 822.156621] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2b1ec5f-c198-4f57-aabb-088bcd7ce111 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.164345] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 822.164345] env[62109]: value = "task-1116495" [ 822.164345] env[62109]: _type = "Task" [ 822.164345] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.180566] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116495, 'name': Rename_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.188907] env[62109]: DEBUG oslo_concurrency.lockutils [req-f6048b82-a560-4460-a94e-48a3a5bcf882 req-6aa58a0d-47b8-4701-b38a-ea54d5a4359f service nova] Releasing lock "refresh_cache-342b7069-22fb-4934-9ec3-8ecbc987696e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.200641] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116492, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.390820] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 822.391089] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.129s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.391392] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.342s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.392967] env[62109]: INFO nova.compute.claims [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 822.395700] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.395843] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Cleaning up deleted instances {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 822.428945] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116493, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.268577} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.431038] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 822.431491] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae2f937-3211-4e6b-bbbc-fa713b4fa58a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.453636] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] af3465db-fd56-458d-a499-14df3a0029f0/af3465db-fd56-458d-a499-14df3a0029f0.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 822.454164] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fce5ff51-b763-429b-a49d-bd8506aab35e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.473891] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 822.473891] env[62109]: value = "task-1116496" [ 822.473891] env[62109]: _type = "Task" [ 822.473891] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.481810] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116496, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.500061] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 822.500282] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Volume detach. 
Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 822.500485] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244340', 'volume_id': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'name': 'volume-8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a24f2349-7c1b-441d-a36e-b16dd61f6031', 'attached_at': '', 'detached_at': '', 'volume_id': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'serial': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 822.501370] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66306898-0a81-4a74-8992-02ce0be7da98 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.519357] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f99449-32c2-4205-a5b0-0a0a3999a715 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.526457] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f024cc7-f6ff-4002-925c-073c078e3ba9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.544181] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea707125-18b2-44a4-938c-4e962817218c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.561094] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] The volume has not been displaced from its original location: [datastore2] volume-8845b7d9-9e50-489a-94dd-17e0fa9b7a61/volume-8845b7d9-9e50-489a-94dd-17e0fa9b7a61.vmdk. No consolidation needed. 
{{(pid=62109) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 822.566326] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Reconfiguring VM instance instance-00000033 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 822.566745] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dc8e6b5-094e-453a-a0cc-9e64b26c1543 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.584491] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Waiting for the task: (returnval){ [ 822.584491] env[62109]: value = "task-1116497" [ 822.584491] env[62109]: _type = "Task" [ 822.584491] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.592879] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.593158] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116497, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.674533] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116495, 'name': Rename_Task, 'duration_secs': 0.352517} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.674861] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 822.675216] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-739560c0-7c19-4b50-98bf-cd19eb862288 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.681431] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 822.681431] env[62109]: value = "task-1116498" [ 822.681431] env[62109]: _type = "Task" [ 822.681431] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.695059] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116498, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.702103] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116492, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.903460] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] There are 11 instances to clean {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 822.904600] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: c44d618e-c781-47ba-b191-cecc01dcfe9b] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 822.984272] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116496, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.035509] env[62109]: DEBUG nova.compute.manager [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Received event network-vif-unplugged-e13a10c2-836a-412a-b1af-974b816d3971 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 823.035745] env[62109]: DEBUG oslo_concurrency.lockutils [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] Acquiring lock "59f6adc7-d491-4a86-83f7-89128511e00f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.035989] env[62109]: DEBUG oslo_concurrency.lockutils [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] Lock "59f6adc7-d491-4a86-83f7-89128511e00f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.036207] env[62109]: DEBUG oslo_concurrency.lockutils [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] Lock "59f6adc7-d491-4a86-83f7-89128511e00f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.036394] env[62109]: DEBUG nova.compute.manager [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] No waiting events found dispatching 
network-vif-unplugged-e13a10c2-836a-412a-b1af-974b816d3971 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 823.036612] env[62109]: DEBUG nova.compute.manager [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Received event network-vif-unplugged-e13a10c2-836a-412a-b1af-974b816d3971 for instance with task_state deleting. {{(pid=62109) _process_instance_event /opt/stack/nova/nova/compute/manager.py:10909}} [ 823.036874] env[62109]: DEBUG nova.compute.manager [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Received event network-changed-e13a10c2-836a-412a-b1af-974b816d3971 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 823.037070] env[62109]: DEBUG nova.compute.manager [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Refreshing instance network info cache due to event network-changed-e13a10c2-836a-412a-b1af-974b816d3971. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 823.037288] env[62109]: DEBUG oslo_concurrency.lockutils [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] Acquiring lock "refresh_cache-59f6adc7-d491-4a86-83f7-89128511e00f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.037449] env[62109]: DEBUG oslo_concurrency.lockutils [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] Acquired lock "refresh_cache-59f6adc7-d491-4a86-83f7-89128511e00f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.037617] env[62109]: DEBUG nova.network.neutron [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Refreshing network info cache for port e13a10c2-836a-412a-b1af-974b816d3971 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 823.095141] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116497, 'name': ReconfigVM_Task, 'duration_secs': 0.190365} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.095523] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Reconfigured VM instance instance-00000033 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 823.100307] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cf3f285-5fa7-44a5-bb28-8bd41c3a12f5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.115372] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Waiting for the task: (returnval){ [ 823.115372] env[62109]: value = "task-1116499" [ 823.115372] env[62109]: _type = "Task" [ 823.115372] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.125613] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116499, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.191146] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116498, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.201877] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116492, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.410031] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: c90ace77-5b8b-4b04-aa57-d47ad17df01e] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 823.485042] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116496, 'name': ReconfigVM_Task, 'duration_secs': 0.862065} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.487616] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Reconfigured VM instance instance-0000003d to attach disk [datastore2] af3465db-fd56-458d-a499-14df3a0029f0/af3465db-fd56-458d-a499-14df3a0029f0.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 823.488425] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dce3470d-e0de-4cdd-a1db-c1f7af88f74f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.494511] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 823.494511] env[62109]: value = "task-1116500" [ 823.494511] env[62109]: _type = "Task" [ 823.494511] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.504131] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116500, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.509227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5c8b11ca-0f38-4606-a1d8-50fdea85bd9d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "59f6adc7-d491-4a86-83f7-89128511e00f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.628846] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116499, 'name': ReconfigVM_Task, 'duration_secs': 0.131494} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.629249] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244340', 'volume_id': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'name': 'volume-8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a24f2349-7c1b-441d-a36e-b16dd61f6031', 'attached_at': '', 'detached_at': '', 'volume_id': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61', 'serial': '8845b7d9-9e50-489a-94dd-17e0fa9b7a61'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 823.629554] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 823.630348] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ebdf55-9796-42f6-ab50-87949948d9d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.637144] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 823.640460] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80cc6fbd-ae70-4046-93eb-1f2bbafa56ad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.692058] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116498, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.709515] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116492, 'name': CreateSnapshot_Task, 'duration_secs': 1.638068} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.712587] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 823.712881] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 823.713091] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 823.713275] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Deleting the datastore file [datastore2] a24f2349-7c1b-441d-a36e-b16dd61f6031 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 823.714252] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5248bdd6-df79-4a08-99e9-48eefdcd6c12 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.717029] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb67d47e-1d1e-4ad3-8373-f70b5f0a3f85 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.729297] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Waiting for the task: (returnval){ [ 823.729297] env[62109]: value = "task-1116502" [ 823.729297] env[62109]: _type = "Task" [ 823.729297] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.739856] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116502, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.783728] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a4380e-2413-4fd8-adef-a77e64e03cf9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.787651] env[62109]: DEBUG nova.network.neutron [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Updated VIF entry in instance network info cache for port e13a10c2-836a-412a-b1af-974b816d3971. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 823.788023] env[62109]: DEBUG nova.network.neutron [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Updating instance_info_cache with network_info: [{"id": "e13a10c2-836a-412a-b1af-974b816d3971", "address": "fa:16:3e:09:0a:3a", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": null, "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tape13a10c2-83", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.793443] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f790ee92-d185-4aef-84cb-7aff184a31b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.825804] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f19b6da-b4bf-4653-aa95-1ff923e50627 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.833524] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc56a60-0808-489b-957d-e3a7f215aafa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.849376] env[62109]: DEBUG nova.compute.provider_tree [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.912449] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 8584eb2c-57a3-455e-9d3c-877286e23ccc] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 824.004087] env[62109]: DEBUG 
oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116500, 'name': Rename_Task, 'duration_secs': 0.256166} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.004771] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 824.005115] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45e0dc19-c1e8-4c0b-a8b8-73b6243b34f5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.012333] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 824.012333] env[62109]: value = "task-1116503" [ 824.012333] env[62109]: _type = "Task" [ 824.012333] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.020346] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116503, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.191949] env[62109]: DEBUG oslo_vmware.api [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116498, 'name': PowerOnVM_Task, 'duration_secs': 1.045636} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.192291] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 824.192501] env[62109]: INFO nova.compute.manager [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Took 9.48 seconds to spawn the instance on the hypervisor. 
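Editor's note: the run of ReconfigVM_Task / Rename_Task / PowerOnVM_Task entries above all follow the same shape: a vCenter task moref is returned, the caller waits on it, and the poller logs the reported progress ("progress is N%") until the task completes. Below is a minimal, self-contained sketch of that poll loop; TaskInfo, get_task_info and the timing parameters are illustrative stand-ins and not the oslo_vmware.api implementation.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str          # "running", "success" or "error"
    progress: int       # percent complete, as echoed in the log lines
    error: str | None = None

def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
    """Poll a task until it reaches a terminal state, mirroring the
    'progress is N%' ... 'completed successfully' sequence in the log."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task {task_id} failed: {info.error}")
        # Still running: report progress each cycle, as the log does.
        print(f"Task {task_id} progress is {info.progress}%.")
        time.sleep(poll_interval)
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")

# Example: a fake task source that reports success on the third poll.
_calls = {"n": 0}
def _fake_task_info(_task_id):
    _calls["n"] += 1
    return TaskInfo(state="success" if _calls["n"] >= 3 else "running",
                    progress=min(100, _calls["n"] * 33))

wait_for_task(_fake_task_info, "task-1116499", poll_interval=0.01)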
[ 824.192681] env[62109]: DEBUG nova.compute.manager [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 824.193442] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b678a52-b058-440f-9692-bf9b879d1155 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.237387] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 824.237950] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7e2237a3-c2bc-43e9-ae95-f609fb8bb4b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.249772] env[62109]: DEBUG oslo_vmware.api [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Task: {'id': task-1116502, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096364} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.250847] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 824.251054] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 824.251238] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 824.252776] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 824.252776] env[62109]: value = "task-1116504" [ 824.252776] env[62109]: _type = "Task" [ 824.252776] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.262078] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116504, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.290336] env[62109]: DEBUG oslo_concurrency.lockutils [req-c93f9e35-39e1-4d34-9cc9-38d9d2fdc69a req-c0b7194c-4951-4e0a-a8e9-6a536c7127c3 service nova] Releasing lock "refresh_cache-59f6adc7-d491-4a86-83f7-89128511e00f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.305670] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Volume detach. Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 824.306156] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ba774910-9fc5-4306-b456-3e9f6b345542 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.315054] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964d6fe0-01be-47b3-89b1-987051566d31 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.350977] env[62109]: ERROR nova.compute.manager [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Failed to detach volume 8845b7d9-9e50-489a-94dd-17e0fa9b7a61 from /dev/sda: nova.exception.InstanceNotFound: Instance a24f2349-7c1b-441d-a36e-b16dd61f6031 could not be found. [ 824.350977] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Traceback (most recent call last): [ 824.350977] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance [ 824.350977] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] self.driver.rebuild(**kwargs) [ 824.350977] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 824.350977] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] raise NotImplementedError() [ 824.350977] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] NotImplementedError [ 824.350977] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] [ 824.350977] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] During handling of the above exception, another exception occurred: [ 824.350977] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] [ 824.350977] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Traceback (most recent call last): [ 824.350977] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume [ 824.350977] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] self.driver.detach_volume(context, old_connection_info, [ 824.351558] env[62109]: ERROR nova.compute.manager 
[instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 553, in detach_volume [ 824.351558] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] return self._volumeops.detach_volume(connection_info, instance) [ 824.351558] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 824.351558] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] self._detach_volume_vmdk(connection_info, instance) [ 824.351558] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 824.351558] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 824.351558] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 824.351558] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] stable_ref.fetch_moref(session) [ 824.351558] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 824.351558] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] raise exception.InstanceNotFound(instance_id=self._uuid) [ 824.351558] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] nova.exception.InstanceNotFound: Instance a24f2349-7c1b-441d-a36e-b16dd61f6031 could not be found. [ 824.351558] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] [ 824.355163] env[62109]: DEBUG nova.scheduler.client.report [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 824.415843] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 1aaa9eae-9183-49d7-a452-4345ad2a9aa0] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 824.500201] env[62109]: DEBUG nova.compute.utils [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Build of instance a24f2349-7c1b-441d-a36e-b16dd61f6031 aborted: Failed to rebuild volume backed instance. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 824.501739] env[62109]: ERROR nova.compute.manager [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance a24f2349-7c1b-441d-a36e-b16dd61f6031 aborted: Failed to rebuild volume backed instance. [ 824.501739] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Traceback (most recent call last): [ 824.501739] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance [ 824.501739] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] self.driver.rebuild(**kwargs) [ 824.501739] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 824.501739] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] raise NotImplementedError() [ 824.501739] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] NotImplementedError [ 824.501739] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] [ 824.501739] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] During handling of the above exception, another exception occurred: [ 824.501739] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] [ 824.501739] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Traceback (most recent call last): [ 824.501739] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/compute/manager.py", line 3600, in _rebuild_volume_backed_instance [ 824.501739] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] self._detach_root_volume(context, instance, root_bdm) [ 824.502274] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/compute/manager.py", line 3579, in _detach_root_volume [ 824.502274] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] with excutils.save_and_reraise_exception(): [ 824.502274] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 824.502274] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] self.force_reraise() [ 824.502274] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 824.502274] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] raise self.value [ 824.502274] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume [ 824.502274] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] 
self.driver.detach_volume(context, old_connection_info, [ 824.502274] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 553, in detach_volume [ 824.502274] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] return self._volumeops.detach_volume(connection_info, instance) [ 824.502274] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 824.502274] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] self._detach_volume_vmdk(connection_info, instance) [ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] stable_ref.fetch_moref(session) [ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] raise exception.InstanceNotFound(instance_id=self._uuid) [ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] nova.exception.InstanceNotFound: Instance a24f2349-7c1b-441d-a36e-b16dd61f6031 could not be found. 
[ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] [ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] During handling of the above exception, another exception occurred: [ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] [ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Traceback (most recent call last): [ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/compute/manager.py", line 10865, in _error_out_instance_on_exception [ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] yield [ 824.502533] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/compute/manager.py", line 3868, in rebuild_instance [ 824.502857] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] self._do_rebuild_instance_with_claim( [ 824.502857] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/compute/manager.py", line 3954, in _do_rebuild_instance_with_claim [ 824.502857] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] self._do_rebuild_instance( [ 824.502857] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/compute/manager.py", line 4146, in _do_rebuild_instance [ 824.502857] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] self._rebuild_default_impl(**kwargs) [ 824.502857] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/compute/manager.py", line 3723, in _rebuild_default_impl [ 824.502857] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] self._rebuild_volume_backed_instance( [ 824.502857] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] File "/opt/stack/nova/nova/compute/manager.py", line 3615, in _rebuild_volume_backed_instance [ 824.502857] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] raise exception.BuildAbortException( [ 824.502857] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] nova.exception.BuildAbortException: Build of instance a24f2349-7c1b-441d-a36e-b16dd61f6031 aborted: Failed to rebuild volume backed instance. [ 824.502857] env[62109]: ERROR nova.compute.manager [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] [ 824.522698] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116503, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.712273] env[62109]: INFO nova.compute.manager [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Took 41.96 seconds to build instance. 
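Editor's note: the two chained tracebacks above describe the whole failure path: the vmwareapi driver has no rebuild() (NotImplementedError), the compute manager falls back to the default rebuild path, which for a volume-backed instance must first detach the root volume, and that detach fails with InstanceNotFound because the VM has already been unregistered and its datastore files deleted, so the rebuild is aborted with BuildAbortException and the instance is set to ERROR. The sketch below reproduces only that control flow; the class names and method bodies are simplified stand-ins, not Nova's code (see nova/compute/manager.py for the real implementation).

class InstanceNotFound(Exception):
    pass

class BuildAbortException(Exception):
    pass

class Driver:
    def rebuild(self, **kwargs):
        # The vmwareapi driver does not implement rebuild(), so the generic
        # driver base class raises NotImplementedError (first traceback).
        raise NotImplementedError()

    def detach_volume(self, connection_info, instance):
        # The VM was already unregistered and its datastore files deleted,
        # so the moref lookup fails (second traceback).
        raise InstanceNotFound(f"Instance {instance} could not be found.")

class ComputeManager:
    def __init__(self, driver):
        self.driver = driver

    def rebuild_instance(self, instance, connection_info):
        try:
            self.driver.rebuild(instance=instance)
        except NotImplementedError:
            # Fall back to the default implementation, which for a
            # volume-backed instance must detach the root volume first.
            try:
                self.driver.detach_volume(connection_info, instance)
            except InstanceNotFound as exc:
                # The failure is wrapped and the instance ends up in ERROR.
                raise BuildAbortException(
                    f"Build of instance {instance} aborted: "
                    "Failed to rebuild volume backed instance.") from exc

if __name__ == "__main__":
    mgr = ComputeManager(Driver())
    try:
        mgr.rebuild_instance("a24f2349-7c1b-441d-a36e-b16dd61f6031", {})
    except BuildAbortException as exc:
        print(f"Setting instance vm_state to ERROR: {exc}")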
[ 824.763101] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116504, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.858934] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.859509] env[62109]: DEBUG nova.compute.manager [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 824.862753] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 35.841s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.920088] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 8d9a7696-0465-4895-9ce8-4b4b8b2ca59e] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 825.023701] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116503, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.214310] env[62109]: DEBUG oslo_concurrency.lockutils [None req-23705528-804c-45b7-a882-3a047f4e05a8 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "c694c178-3894-4997-8e99-8f4900a64848" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.732s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.263316] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116504, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.367077] env[62109]: DEBUG nova.compute.utils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 825.371261] env[62109]: INFO nova.compute.claims [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 825.375741] env[62109]: DEBUG nova.compute.manager [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 825.376957] env[62109]: DEBUG nova.network.neutron [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 825.419672] env[62109]: DEBUG nova.policy [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '67a1245dbb50458ebda1a0a350def68b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ac84df552ee74053a00b8204aa781f3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 825.422742] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: f6d3a50c-bcc3-4a6f-969f-4e629646f427] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 825.523207] env[62109]: DEBUG oslo_vmware.api [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116503, 'name': PowerOnVM_Task, 'duration_secs': 1.380408} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.523485] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 825.523753] env[62109]: INFO nova.compute.manager [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Took 8.23 seconds to spawn the instance on the hypervisor. 
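Editor's note: the lockutils entries nearby ("acquired ... waited 35.841s", "released ... held 2.467s") record how long each caller waited for, and then held, a named lock such as "compute_resources". The stdlib-only sketch below illustrates that bookkeeping; it is not oslo_concurrency.lockutils itself, and the lock and owner names are examples taken from the log.

import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name: str, owner: str):
    """Acquire a process-local named lock and report wait/hold times in the
    same style as the lockutils log lines."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

# Example usage mirroring the resource-tracker claim seen in the log.
with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.1)  # stand-in for the claim work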
[ 825.523864] env[62109]: DEBUG nova.compute.manager [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 825.524646] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f6b672-b887-4f02-a2a1-4b1f94b78ccb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.680457] env[62109]: DEBUG nova.network.neutron [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Successfully created port: 56c50c1b-b3f7-4097-b080-6b487489343b {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 825.722691] env[62109]: INFO nova.compute.manager [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Rebuilding instance [ 825.763059] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116504, 'name': CloneVM_Task, 'duration_secs': 1.295898} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.763340] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Created linked-clone VM from snapshot [ 825.764124] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdfb226-bd73-4510-944f-8f2254531684 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.774479] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Uploading image 871af3c4-4325-4264-8f1d-6cfc2a52477d {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 825.776766] env[62109]: DEBUG nova.compute.manager [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 825.777258] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d38a625-7bfc-41f9-a24e-baa3ea181af9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.801698] env[62109]: DEBUG oslo_vmware.rw_handles [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 825.801698] env[62109]: value = 
"vm-244445" [ 825.801698] env[62109]: _type = "VirtualMachine" [ 825.801698] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 825.802042] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-120f7ecb-96dc-49b8-86e5-2b5425820dee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.809303] env[62109]: DEBUG oslo_vmware.rw_handles [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lease: (returnval){ [ 825.809303] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52541363-0de4-6927-681e-198e32371887" [ 825.809303] env[62109]: _type = "HttpNfcLease" [ 825.809303] env[62109]: } obtained for exporting VM: (result){ [ 825.809303] env[62109]: value = "vm-244445" [ 825.809303] env[62109]: _type = "VirtualMachine" [ 825.809303] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 825.809648] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the lease: (returnval){ [ 825.809648] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52541363-0de4-6927-681e-198e32371887" [ 825.809648] env[62109]: _type = "HttpNfcLease" [ 825.809648] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 825.816675] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 825.816675] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52541363-0de4-6927-681e-198e32371887" [ 825.816675] env[62109]: _type = "HttpNfcLease" [ 825.816675] env[62109]: } is initializing. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 825.876854] env[62109]: DEBUG nova.compute.manager [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 825.880997] env[62109]: INFO nova.compute.resource_tracker [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating resource usage from migration a7f68b5f-2580-46ec-a485-50b41584f55e [ 825.926470] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: d727d597-c4ac-426e-bdc3-fc4f73a3eac9] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 826.262753] env[62109]: INFO nova.compute.manager [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Took 42.55 seconds to build instance. 
[ 826.287966] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 826.288315] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59da6181-8d86-4a7e-8b56-1924f526d9f1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.296263] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 826.296263] env[62109]: value = "task-1116506" [ 826.296263] env[62109]: _type = "Task" [ 826.296263] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.310216] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116506, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.321293] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 826.321293] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52541363-0de4-6927-681e-198e32371887" [ 826.321293] env[62109]: _type = "HttpNfcLease" [ 826.321293] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 826.321604] env[62109]: DEBUG oslo_vmware.rw_handles [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 826.321604] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52541363-0de4-6927-681e-198e32371887" [ 826.321604] env[62109]: _type = "HttpNfcLease" [ 826.321604] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 826.322532] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9dccf78-d846-4f93-b2d1-d879a9b36801 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.330739] env[62109]: DEBUG oslo_vmware.rw_handles [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52db19dd-3f74-5745-0773-51d039117e69/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 826.330739] env[62109]: DEBUG oslo_vmware.rw_handles [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52db19dd-3f74-5745-0773-51d039117e69/disk-0.vmdk for reading. 
{{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 826.430037] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 46aa78cc-ea0a-4c1b-aadb-f2a4856c9371] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 826.451082] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3d6bde90-c188-43f0-8a28-56a18f37b4bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.477297] env[62109]: INFO nova.compute.manager [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Rescuing [ 826.477579] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "refresh_cache-af3465db-fd56-458d-a499-14df3a0029f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.477741] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquired lock "refresh_cache-af3465db-fd56-458d-a499-14df3a0029f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.477954] env[62109]: DEBUG nova.network.neutron [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 826.568471] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c278d3-9cdb-427a-a966-9bf40c8371bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.576223] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44117b0d-d519-4d7d-be10-096ec272fe50 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.607683] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd84a41d-d9cd-4ce6-b41e-567c20ceabed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.615258] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145676bc-e6e3-4500-b06d-a3d97a00fec0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.628732] env[62109]: DEBUG nova.compute.provider_tree [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.756474] env[62109]: DEBUG oslo_concurrency.lockutils 
[None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.771032] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81b2dbc1-75fa-4262-aedb-6b2ad2046d06 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "af3465db-fd56-458d-a499-14df3a0029f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.662s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.812319] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116506, 'name': PowerOffVM_Task, 'duration_secs': 0.208293} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.813068] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 826.813211] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 826.814132] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd9f607-5b89-43c3-8a96-821c4ecf6cf8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.822184] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 826.822470] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a599b497-9c44-40cf-a977-780278e71cde {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.886335] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 826.886503] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 826.886561] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleting the datastore file [datastore1] c694c178-3894-4997-8e99-8f4900a64848 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 826.886826] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8cc0123d-1d12-4635-9a74-24cf94650d4c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.894991] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 826.894991] env[62109]: value = "task-1116508" [ 826.894991] env[62109]: _type = "Task" [ 826.894991] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.909816] env[62109]: DEBUG nova.compute.manager [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 826.934598] env[62109]: DEBUG nova.virt.hardware [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 826.935384] env[62109]: DEBUG nova.virt.hardware [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 826.936085] env[62109]: DEBUG nova.virt.hardware [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 826.936085] env[62109]: DEBUG nova.virt.hardware [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
826.936085] env[62109]: DEBUG nova.virt.hardware [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 826.936212] env[62109]: DEBUG nova.virt.hardware [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 826.936411] env[62109]: DEBUG nova.virt.hardware [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 826.936654] env[62109]: DEBUG nova.virt.hardware [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 826.937097] env[62109]: DEBUG nova.virt.hardware [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 826.937222] env[62109]: DEBUG nova.virt.hardware [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 826.937410] env[62109]: DEBUG nova.virt.hardware [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 826.937978] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 17ee49a9-d980-46c0-996e-6a43c80be434] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 826.940762] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37166d24-6722-46ee-8cff-6c0d85f6e849 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.949459] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1379ca-3b36-4fff-8051-73e18a11c732 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.134270] env[62109]: DEBUG nova.scheduler.client.report [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 
based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 827.183652] env[62109]: DEBUG nova.compute.manager [req-3e289458-7ea4-46b5-b30b-13f72ccd9a96 req-495f6a3e-2553-4847-87c7-1223e54b1c70 service nova] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Received event network-vif-plugged-56c50c1b-b3f7-4097-b080-6b487489343b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.183965] env[62109]: DEBUG oslo_concurrency.lockutils [req-3e289458-7ea4-46b5-b30b-13f72ccd9a96 req-495f6a3e-2553-4847-87c7-1223e54b1c70 service nova] Acquiring lock "5d656f91-d35f-45e1-8892-7cdacd306960-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.184655] env[62109]: DEBUG oslo_concurrency.lockutils [req-3e289458-7ea4-46b5-b30b-13f72ccd9a96 req-495f6a3e-2553-4847-87c7-1223e54b1c70 service nova] Lock "5d656f91-d35f-45e1-8892-7cdacd306960-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.184904] env[62109]: DEBUG oslo_concurrency.lockutils [req-3e289458-7ea4-46b5-b30b-13f72ccd9a96 req-495f6a3e-2553-4847-87c7-1223e54b1c70 service nova] Lock "5d656f91-d35f-45e1-8892-7cdacd306960-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.185134] env[62109]: DEBUG nova.compute.manager [req-3e289458-7ea4-46b5-b30b-13f72ccd9a96 req-495f6a3e-2553-4847-87c7-1223e54b1c70 service nova] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] No waiting events found dispatching network-vif-plugged-56c50c1b-b3f7-4097-b080-6b487489343b {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 827.185327] env[62109]: WARNING nova.compute.manager [req-3e289458-7ea4-46b5-b30b-13f72ccd9a96 req-495f6a3e-2553-4847-87c7-1223e54b1c70 service nova] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Received unexpected event network-vif-plugged-56c50c1b-b3f7-4097-b080-6b487489343b for instance with vm_state building and task_state spawning. 
[ 827.289185] env[62109]: DEBUG nova.network.neutron [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Updating instance_info_cache with network_info: [{"id": "a33eb191-a49c-4a63-8f1a-569b4fcbc346", "address": "fa:16:3e:6c:a1:54", "network": {"id": "41fbc2a0-d530-4b40-ba33-39d1ea6d6046", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1834365577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "45ad1c039aa9463e977cf986ce4dccf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa33eb191-a4", "ovs_interfaceid": "a33eb191-a49c-4a63-8f1a-569b4fcbc346", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.382890] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Acquiring lock "a24f2349-7c1b-441d-a36e-b16dd61f6031" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.383070] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Lock "a24f2349-7c1b-441d-a36e-b16dd61f6031" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.383551] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Acquiring lock "a24f2349-7c1b-441d-a36e-b16dd61f6031-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.383781] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Lock "a24f2349-7c1b-441d-a36e-b16dd61f6031-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.383984] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 
tempest-ServerActionsV293TestJSON-1570109202-project-member] Lock "a24f2349-7c1b-441d-a36e-b16dd61f6031-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.386316] env[62109]: INFO nova.compute.manager [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Terminating instance [ 827.388662] env[62109]: DEBUG nova.compute.manager [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 827.388969] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ea5e70b-bc1d-4ea6-bc3e-3eec5f466290 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.402108] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123fb804-bd01-44bb-b807-f9a85784467e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.418578] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116508, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161264} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.420408] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 827.420408] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 827.420408] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 827.436833] env[62109]: WARNING nova.virt.vmwareapi.driver [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance a24f2349-7c1b-441d-a36e-b16dd61f6031 could not be found. 
[ 827.437113] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 827.437586] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8fbe958-da1d-4225-8e28-586005a496bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.443933] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 934e3a1b-8d3f-4de0-ae8b-35b82d3859a1] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 827.448744] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f799954f-e241-44fd-bbe3-54757b2bd08e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.481880] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a24f2349-7c1b-441d-a36e-b16dd61f6031 could not be found. [ 827.482404] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 827.482475] env[62109]: INFO nova.compute.manager [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Took 0.09 seconds to destroy the instance on the hypervisor. [ 827.482793] env[62109]: DEBUG oslo.service.loopingcall [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.483122] env[62109]: DEBUG nova.compute.manager [-] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 827.483320] env[62109]: DEBUG nova.network.neutron [-] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 827.642954] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.781s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.643201] env[62109]: INFO nova.compute.manager [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Migrating [ 827.643454] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.643607] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.646448] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.266s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.646448] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.647977] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.928s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.648281] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.650259] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.730s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.651978] env[62109]: INFO nova.compute.claims [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 827.655663] env[62109]: INFO nova.compute.rpcapi [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 827.656222] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.675632] env[62109]: INFO nova.scheduler.client.report [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Deleted allocations for instance 7f40cdc8-3421-47b7-b148-ff6417105dbb [ 827.693161] env[62109]: INFO nova.scheduler.client.report [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Deleted allocations for instance 8b6ec904-8c68-4eaa-94fe-47a87528e26b [ 827.778933] env[62109]: DEBUG nova.network.neutron [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Successfully updated port: 56c50c1b-b3f7-4097-b080-6b487489343b {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 827.795364] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Releasing lock "refresh_cache-af3465db-fd56-458d-a499-14df3a0029f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.812703] env[62109]: DEBUG nova.compute.manager [req-be06a957-db05-4016-b766-6f267e684fcc req-ab047f50-8e4a-4250-91a9-45ae7871bc5e service nova] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Received event network-changed-56c50c1b-b3f7-4097-b080-6b487489343b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.815011] env[62109]: DEBUG nova.compute.manager [req-be06a957-db05-4016-b766-6f267e684fcc req-ab047f50-8e4a-4250-91a9-45ae7871bc5e service nova] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Refreshing instance network info cache due to event network-changed-56c50c1b-b3f7-4097-b080-6b487489343b. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 827.815011] env[62109]: DEBUG oslo_concurrency.lockutils [req-be06a957-db05-4016-b766-6f267e684fcc req-ab047f50-8e4a-4250-91a9-45ae7871bc5e service nova] Acquiring lock "refresh_cache-5d656f91-d35f-45e1-8892-7cdacd306960" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.815011] env[62109]: DEBUG oslo_concurrency.lockutils [req-be06a957-db05-4016-b766-6f267e684fcc req-ab047f50-8e4a-4250-91a9-45ae7871bc5e service nova] Acquired lock "refresh_cache-5d656f91-d35f-45e1-8892-7cdacd306960" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.815011] env[62109]: DEBUG nova.network.neutron [req-be06a957-db05-4016-b766-6f267e684fcc req-ab047f50-8e4a-4250-91a9-45ae7871bc5e service nova] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Refreshing network info cache for port 56c50c1b-b3f7-4097-b080-6b487489343b {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 827.960989] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 13988400-7b35-4175-9410-84eff918111d] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 827.999289] env[62109]: WARNING oslo_messaging._drivers.amqpdriver [-] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 828.173765] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.173765] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquired lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.173765] env[62109]: DEBUG nova.network.neutron [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 828.187111] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a632ecd4-b5df-4324-916a-3655c02d3c56 tempest-FloatingIPsAssociationNegativeTestJSON-425141119 tempest-FloatingIPsAssociationNegativeTestJSON-425141119-project-member] Lock "7f40cdc8-3421-47b7-b148-ff6417105dbb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.144s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.201125] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a0f89c4e-f7ee-4d84-a7dc-cf90d25c8583 tempest-ServersTestJSON-1176399938 tempest-ServersTestJSON-1176399938-project-member] Lock "8b6ec904-8c68-4eaa-94fe-47a87528e26b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.093s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.281689] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "refresh_cache-5d656f91-d35f-45e1-8892-7cdacd306960" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.339257] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 828.339257] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f9ee6de-91ff-42ca-b11f-22dae85648a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.349675] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 828.349675] env[62109]: value = "task-1116509" [ 828.349675] env[62109]: _type = "Task" [ 828.349675] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.361028] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116509, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.382291] env[62109]: DEBUG nova.network.neutron [req-be06a957-db05-4016-b766-6f267e684fcc req-ab047f50-8e4a-4250-91a9-45ae7871bc5e service nova] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 828.460771] env[62109]: DEBUG nova.virt.hardware [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 828.461126] env[62109]: DEBUG nova.virt.hardware [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 828.461317] env[62109]: DEBUG nova.virt.hardware [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.461517] env[62109]: DEBUG nova.virt.hardware [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 828.461668] env[62109]: DEBUG nova.virt.hardware [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.461819] env[62109]: DEBUG nova.virt.hardware [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 828.462041] env[62109]: DEBUG nova.virt.hardware [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 828.462433] env[62109]: DEBUG nova.virt.hardware [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 828.462645] env[62109]: DEBUG 
nova.virt.hardware [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 828.462822] env[62109]: DEBUG nova.virt.hardware [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 828.463034] env[62109]: DEBUG nova.virt.hardware [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 828.464260] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332bf929-409b-4216-9910-f3e74f2051a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.467340] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 828.467446] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Cleaning up deleted instances with incomplete migration {{(pid=62109) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 828.476919] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce28f7ef-0789-4237-ae26-19d548505185 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.499850] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:1a:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6e940e5-e083-4238-973e-f1b4e2a3a5c7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d487765-cb55-45bd-b4f2-b2cddcf12cfd', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 828.507880] env[62109]: DEBUG oslo.service.loopingcall [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 828.508304] env[62109]: DEBUG nova.network.neutron [-] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.510550] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c694c178-3894-4997-8e99-8f4900a64848] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 828.510780] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56cf79f9-6b73-42b2-b25b-34c5aa0d7108 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.532573] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 828.532573] env[62109]: value = "task-1116510" [ 828.532573] env[62109]: _type = "Task" [ 828.532573] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.546425] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116510, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.548737] env[62109]: DEBUG nova.network.neutron [req-be06a957-db05-4016-b766-6f267e684fcc req-ab047f50-8e4a-4250-91a9-45ae7871bc5e service nova] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.860904] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116509, 'name': PowerOffVM_Task, 'duration_secs': 0.23359} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.861602] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 828.862570] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11979c0-b7f1-4e67-8c57-802f6598e94c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.899644] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2096ed4c-9e2b-41df-b0a8-0a65ae6c3d51 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.936151] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 828.936491] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5fe9511-044d-4b68-9be6-7201f7114b69 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.946187] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 828.946187] env[62109]: value = "task-1116511" [ 828.946187] env[62109]: _type = "Task" [ 828.946187] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.954952] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116511, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.973346] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.010728] env[62109]: INFO nova.compute.manager [-] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Took 1.53 seconds to deallocate network for instance. [ 829.053759] env[62109]: DEBUG oslo_concurrency.lockutils [req-be06a957-db05-4016-b766-6f267e684fcc req-ab047f50-8e4a-4250-91a9-45ae7871bc5e service nova] Releasing lock "refresh_cache-5d656f91-d35f-45e1-8892-7cdacd306960" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.053759] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116510, 'name': CreateVM_Task, 'duration_secs': 0.369362} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.053947] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "refresh_cache-5d656f91-d35f-45e1-8892-7cdacd306960" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.054095] env[62109]: DEBUG nova.network.neutron [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 829.055115] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c694c178-3894-4997-8e99-8f4900a64848] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 829.055764] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.055928] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.056817] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 829.057126] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64462515-0f51-4952-9044-874d8c6688a3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.062946] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 829.062946] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52647bb8-87fc-70ed-e996-4dcf9cae3e97" [ 829.062946] env[62109]: _type = "Task" [ 829.062946] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.076571] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52647bb8-87fc-70ed-e996-4dcf9cae3e97, 'name': SearchDatastore_Task, 'duration_secs': 0.010492} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.077236] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.077347] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 829.077684] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.077770] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.077951] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 829.078241] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c033ce4-c0af-4b08-9941-927e3ed7cb01 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.089795] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 829.089989] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 829.090768] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36bca9aa-02fc-4481-8648-b97313979b6c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.101098] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 829.101098] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525e7568-5b9c-12cc-7daa-94bed9cacad7" [ 829.101098] env[62109]: _type = "Task" [ 829.101098] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.110071] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525e7568-5b9c-12cc-7daa-94bed9cacad7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.144047] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf63165-3686-4d8e-9ffc-7624b3a84c9b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.155028] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9bb36f-d6f5-48b2-828c-0b5ad5b189a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.189646] env[62109]: DEBUG nova.network.neutron [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance_info_cache with network_info: [{"id": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "address": "fa:16:3e:f0:07:2b", "network": {"id": "4fcb7814-4de2-430d-af81-55131504c5bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2d2be1e2322b4c87945fff0cd79d3c7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde4056dc-a5", "ovs_interfaceid": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.191770] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1fd6b406-8e62-4fbc-b0d7-b170fe3aa60e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.201994] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f58957-26d2-4b93-9ff1-2a9721e9e4c9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.215854] env[62109]: DEBUG nova.compute.provider_tree [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.241938] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Acquiring lock "c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.242259] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Lock "c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.242484] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Acquiring lock "c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.242673] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Lock "c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.242868] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Lock "c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.245512] env[62109]: INFO nova.compute.manager [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Terminating instance [ 829.247592] env[62109]: DEBUG nova.compute.manager [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 
tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 829.247797] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 829.248660] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62cc12b1-311d-4dab-948a-9b7c1ef6b0e3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.256990] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 829.257274] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23bf2e37-f920-4838-81d9-13a15b722742 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.264025] env[62109]: DEBUG oslo_vmware.api [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Waiting for the task: (returnval){ [ 829.264025] env[62109]: value = "task-1116512" [ 829.264025] env[62109]: _type = "Task" [ 829.264025] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.272057] env[62109]: DEBUG oslo_vmware.api [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116512, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.462228] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 829.462228] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 829.462228] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.610131] env[62109]: INFO nova.compute.manager [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Took 0.60 seconds to detach 1 volumes for instance. [ 829.614432] env[62109]: DEBUG nova.compute.manager [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Deleting volume: 8845b7d9-9e50-489a-94dd-17e0fa9b7a61 {{(pid=62109) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 829.623132] env[62109]: DEBUG nova.network.neutron [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 829.626124] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525e7568-5b9c-12cc-7daa-94bed9cacad7, 'name': SearchDatastore_Task, 'duration_secs': 0.008629} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.627444] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bc4761a-a909-41a3-b879-c76db59c7e30 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.637031] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 829.637031] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5213aedf-6387-3031-df53-ae7b76430e85" [ 829.637031] env[62109]: _type = "Task" [ 829.637031] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.645503] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5213aedf-6387-3031-df53-ae7b76430e85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.696864] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Releasing lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.718935] env[62109]: DEBUG nova.scheduler.client.report [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 829.776844] env[62109]: DEBUG oslo_vmware.api [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116512, 'name': PowerOffVM_Task, 'duration_secs': 0.208391} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.779872] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 829.779872] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 829.779872] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ee6b8f7-3039-44a2-92ae-a3ba645604a6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.843029] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 829.843029] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 829.843029] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Deleting the datastore file [datastore2] c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 829.843029] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef5aff9e-d7ba-4991-90f6-1256886180dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.851024] env[62109]: DEBUG oslo_vmware.api [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Waiting for the task: (returnval){ [ 829.851024] env[62109]: value = "task-1116515" [ 829.851024] env[62109]: _type = "Task" [ 829.851024] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.860554] env[62109]: DEBUG oslo_vmware.api [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116515, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.874684] env[62109]: DEBUG nova.compute.manager [req-7025147a-b3e3-4c98-9012-80656445b3e1 req-a9bc1326-f8d8-4d60-b0df-c2502a6ca95a service nova] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Received event network-vif-deleted-3cefabfe-3893-464d-ad9b-104d901e71c5 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 830.147561] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5213aedf-6387-3031-df53-ae7b76430e85, 'name': SearchDatastore_Task, 'duration_secs': 0.011363} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.147928] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.148170] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] c694c178-3894-4997-8e99-8f4900a64848/c694c178-3894-4997-8e99-8f4900a64848.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 830.148468] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.148659] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 830.148878] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d1bc7b9-d267-475c-9fb8-e1a4f84ab7b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.154196] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2ac7baf-29c2-4ea7-ae17-294e43514fcc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.161536] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 830.161536] env[62109]: value = "task-1116516" [ 830.161536] env[62109]: _type = "Task" [ 
830.161536] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.167436] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 830.167637] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 830.168887] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be638dc9-3b04-4720-80f7-cbc8cb7c303d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.175506] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.175727] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116516, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.178929] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 830.178929] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e88ea6-cdf4-96bb-c787-4f3c0824c406" [ 830.178929] env[62109]: _type = "Task" [ 830.178929] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.187798] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e88ea6-cdf4-96bb-c787-4f3c0824c406, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.202831] env[62109]: DEBUG nova.network.neutron [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Updating instance_info_cache with network_info: [{"id": "56c50c1b-b3f7-4097-b080-6b487489343b", "address": "fa:16:3e:d6:f4:14", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56c50c1b-b3", "ovs_interfaceid": "56c50c1b-b3f7-4097-b080-6b487489343b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.224944] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.574s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.225568] env[62109]: DEBUG nova.compute.manager [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 830.229118] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.435s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.230937] env[62109]: INFO nova.compute.claims [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.364977] env[62109]: DEBUG oslo_vmware.api [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Task: {'id': task-1116515, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178697} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.365465] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 830.366670] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 830.366670] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 830.366670] env[62109]: INFO nova.compute.manager [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Took 1.12 seconds to destroy the instance on the hypervisor. [ 830.366670] env[62109]: DEBUG oslo.service.loopingcall [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.366903] env[62109]: DEBUG nova.compute.manager [-] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 830.366980] env[62109]: DEBUG nova.network.neutron [-] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 830.675059] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116516, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503823} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.675632] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] c694c178-3894-4997-8e99-8f4900a64848/c694c178-3894-4997-8e99-8f4900a64848.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 830.675919] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 830.676281] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ba0cc62-3964-4644-946f-46f8e8d6f7a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.688244] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 830.688244] env[62109]: value = "task-1116517" [ 830.688244] env[62109]: _type = "Task" [ 830.688244] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.698035] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e88ea6-cdf4-96bb-c787-4f3c0824c406, 'name': SearchDatastore_Task, 'duration_secs': 0.009747} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.700402] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a1f9cb8-6fa5-4787-9587-6b207124f19f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.706926] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116517, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.710237] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "refresh_cache-5d656f91-d35f-45e1-8892-7cdacd306960" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.710725] env[62109]: DEBUG nova.compute.manager [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Instance network_info: |[{"id": "56c50c1b-b3f7-4097-b080-6b487489343b", "address": "fa:16:3e:d6:f4:14", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56c50c1b-b3", "ovs_interfaceid": "56c50c1b-b3f7-4097-b080-6b487489343b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 830.714155] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 830.714155] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e7be6a-900f-f083-9655-fe3ef611b3c3" [ 830.714155] env[62109]: _type = "Task" [ 830.714155] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.714915] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:f4:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56c50c1b-b3f7-4097-b080-6b487489343b', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 830.725409] env[62109]: DEBUG oslo.service.loopingcall [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.725409] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 830.729352] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75875204-26f2-4108-a771-5f04670a9f53 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.750744] env[62109]: DEBUG nova.compute.utils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 830.753288] env[62109]: DEBUG nova.compute.manager [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 830.753543] env[62109]: DEBUG nova.network.neutron [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 830.761433] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e7be6a-900f-f083-9655-fe3ef611b3c3, 'name': SearchDatastore_Task, 'duration_secs': 0.009761} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.763633] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.764200] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] af3465db-fd56-458d-a499-14df3a0029f0/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk. {{(pid=62109) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 830.764855] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 830.764855] env[62109]: value = "task-1116518" [ 830.764855] env[62109]: _type = "Task" [ 830.764855] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.765127] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-700085bb-330b-4666-8734-b6165b995724 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.779269] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116518, 'name': CreateVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.780920] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 830.780920] env[62109]: value = "task-1116519" [ 830.780920] env[62109]: _type = "Task" [ 830.780920] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.790634] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116519, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.833387] env[62109]: DEBUG nova.policy [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94f0db4664ce465b8e71928d55284d0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cddb4c7a9ba442d98d6cf4f3ab30ad71', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 831.203149] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116517, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092098} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.203667] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 831.205049] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef94726-3bb2-4311-85e9-f440d21e16f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.236917] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] c694c178-3894-4997-8e99-8f4900a64848/c694c178-3894-4997-8e99-8f4900a64848.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.238240] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94cfec50-4958-4909-afcd-a5c5b78532d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.260089] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e89e9f-62d3-4554-8947-0199cedbaa76 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.263078] env[62109]: DEBUG nova.compute.manager [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 831.291540] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance '32cccd30-278c-48b6-8855-5cd76c2da057' progress to 0 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 831.296936] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 831.296936] env[62109]: value = "task-1116520" [ 831.296936] env[62109]: _type = "Task" [ 831.296936] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.310214] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116518, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.316271] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116519, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482513} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.319485] env[62109]: INFO nova.virt.vmwareapi.ds_util [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] af3465db-fd56-458d-a499-14df3a0029f0/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk. [ 831.319717] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116520, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.320462] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b401fd-d70f-4673-b075-e1dd3f92b382 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.349100] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] af3465db-fd56-458d-a499-14df3a0029f0/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.352914] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3accec59-9433-44da-8d86-f10051470535 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.372910] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 831.372910] env[62109]: value = "task-1116521" [ 831.372910] env[62109]: _type = "Task" [ 831.372910] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.386231] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116521, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.437545] env[62109]: DEBUG nova.network.neutron [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Successfully created port: a06370a9-effe-4205-85fa-bfa658250da0 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 831.785826] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116518, 'name': CreateVM_Task, 'duration_secs': 0.545376} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.786024] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 831.787012] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.787210] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.787542] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 831.787879] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39498883-bed8-401c-a7aa-7b89a712edb9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.794601] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 831.794601] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e445e5-783e-2da6-33a0-5e3c6132a576" [ 831.794601] env[62109]: _type = "Task" [ 831.794601] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.799055] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 831.799875] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c26f99-bb13-4845-b038-99db0358021c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.802911] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ccb9e40-a3bc-4595-955c-b7ed3630be5e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.814475] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e445e5-783e-2da6-33a0-5e3c6132a576, 'name': SearchDatastore_Task, 'duration_secs': 0.010889} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.816897] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b24beb0-39f7-4f01-9b30-9dc2438aad1b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.820283] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.820606] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 831.820854] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.821015] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.821216] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6294788-1171-4219-b54c-57e4b163400a 
tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 831.821782] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 831.821782] env[62109]: value = "task-1116522" [ 831.821782] env[62109]: _type = "Task" [ 831.821782] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.825599] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f4e2b12-a5b2-4ab0-9904-be98f58f25a3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.828182] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116520, 'name': ReconfigVM_Task, 'duration_secs': 0.351714} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.832191] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Reconfigured VM instance instance-0000003c to attach disk [datastore2] c694c178-3894-4997-8e99-8f4900a64848/c694c178-3894-4997-8e99-8f4900a64848.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 831.861393] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8104d8c5-2cdc-459f-a77b-9e0b583a2d20 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.866019] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9633ef03-c1cb-41c1-9040-e9cc7d9c71e0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.871689] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 831.871893] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 831.876037] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116522, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.876037] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e385e26e-2ce3-454d-b6d6-c43f7e119e60 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.878853] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 831.878853] env[62109]: value = "task-1116523" [ 831.878853] env[62109]: _type = "Task" [ 831.878853] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.883459] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04fa9617-c1f1-4047-8ca2-2c886350f81c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.889715] env[62109]: DEBUG nova.network.neutron [-] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.898063] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 831.898063] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52756e99-667d-bdbe-2b57-8954c7cdad94" [ 831.898063] env[62109]: _type = "Task" [ 831.898063] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.925366] env[62109]: DEBUG nova.compute.provider_tree [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.928854] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116523, 'name': Rename_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.928854] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116521, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.931040] env[62109]: DEBUG nova.compute.manager [req-11e552b5-4a0d-4c14-9219-7749c918ef24 req-48da1fd9-a52d-4d2a-9d56-68fc7d5c9783 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Received event network-vif-deleted-ef28f215-7e05-46fd-ad13-33c6eab750a4 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 831.931276] env[62109]: INFO nova.compute.manager [req-11e552b5-4a0d-4c14-9219-7749c918ef24 req-48da1fd9-a52d-4d2a-9d56-68fc7d5c9783 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Neutron deleted interface ef28f215-7e05-46fd-ad13-33c6eab750a4; detaching it from the instance and deleting it from the info cache [ 831.931581] env[62109]: DEBUG nova.network.neutron [req-11e552b5-4a0d-4c14-9219-7749c918ef24 req-48da1fd9-a52d-4d2a-9d56-68fc7d5c9783 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.937905] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52756e99-667d-bdbe-2b57-8954c7cdad94, 'name': SearchDatastore_Task, 'duration_secs': 0.011631} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.939711] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bab1691-2a54-4ecd-b1f0-a53df0d81edf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.948176] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 831.948176] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d3ade3-9158-37e3-bd9d-369440f1565c" [ 831.948176] env[62109]: _type = "Task" [ 831.948176] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.959805] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d3ade3-9158-37e3-bd9d-369440f1565c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.279248] env[62109]: DEBUG nova.compute.manager [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 832.312136] env[62109]: DEBUG nova.virt.hardware [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 832.312798] env[62109]: DEBUG nova.virt.hardware [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 832.313067] env[62109]: DEBUG nova.virt.hardware [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.313350] env[62109]: DEBUG nova.virt.hardware [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 832.314144] env[62109]: DEBUG nova.virt.hardware [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.314358] env[62109]: DEBUG nova.virt.hardware [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 832.314708] env[62109]: DEBUG nova.virt.hardware [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 832.314808] env[62109]: DEBUG nova.virt.hardware [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 832.315091] env[62109]: DEBUG nova.virt.hardware [None 
req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 832.315318] env[62109]: DEBUG nova.virt.hardware [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 832.315558] env[62109]: DEBUG nova.virt.hardware [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 832.316478] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3d5672-c248-45ee-bdd9-b8fec33e8361 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.328922] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176ccc09-74d1-4e43-997e-c8c97f674606 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.342315] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116522, 'name': PowerOffVM_Task, 'duration_secs': 0.260794} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.353948] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 832.354391] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance '32cccd30-278c-48b6-8855-5cd76c2da057' progress to 17 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 832.392392] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116521, 'name': ReconfigVM_Task, 'duration_secs': 0.666445} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.399023] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Reconfigured VM instance instance-0000003d to attach disk [datastore2] af3465db-fd56-458d-a499-14df3a0029f0/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 832.399023] env[62109]: INFO nova.compute.manager [-] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Took 2.03 seconds to deallocate network for instance. [ 832.399815] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3487dc45-5f47-4de3-ad34-78c8a7e49d31 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.416338] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116523, 'name': Rename_Task, 'duration_secs': 0.171585} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.420969] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 832.440161] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8b97b71-cea4-46e0-9233-f1fad7931dc8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.442762] env[62109]: DEBUG nova.scheduler.client.report [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 832.454323] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7d4115df-c2ae-4a2a-81d2-ff3de2417bad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.456641] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7954f674-2886-4af5-a6cd-0b618e3ed0c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.482811] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 
tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 832.482811] env[62109]: value = "task-1116524" [ 832.482811] env[62109]: _type = "Task" [ 832.482811] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.488046] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8ce2fe-dd9a-4b75-b29a-7166ff8c5f80 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.507291] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d3ade3-9158-37e3-bd9d-369440f1565c, 'name': SearchDatastore_Task, 'duration_secs': 0.015014} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.507512] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 832.507512] env[62109]: value = "task-1116525" [ 832.507512] env[62109]: _type = "Task" [ 832.507512] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.508603] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.508917] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 5d656f91-d35f-45e1-8892-7cdacd306960/5d656f91-d35f-45e1-8892-7cdacd306960.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 832.509666] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-893a7591-8e3d-4029-b7e9-eafcf51f8630 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.519822] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116524, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.536394] env[62109]: DEBUG nova.compute.manager [req-11e552b5-4a0d-4c14-9219-7749c918ef24 req-48da1fd9-a52d-4d2a-9d56-68fc7d5c9783 service nova] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Detach interface failed, port_id=ef28f215-7e05-46fd-ad13-33c6eab750a4, reason: Instance c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4 could not be found. 
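The "Waiting for the task: (returnval){ ... } to complete" blocks and the "progress is N%" entries above come from oslo.vmware's task polling: a vCenter call returns a task handle such as task-1116524, and the caller blocks while periodically polling it until the task reports success, at which point the duration is logged. A rough sketch of that loop under those assumptions (FakeVimTask and this wait_for_task are invented stand-ins, not the oslo.vmware API; only the progress/success path visible in the log is reproduced):

import time

class FakeVimTask:
    """Pretend task handle that reports increasing progress, then success."""
    def __init__(self, task_id, progress_steps):
        self.task_id = task_id
        self._steps = iter(progress_steps)

    def poll(self):
        try:
            return {"state": "running", "progress": next(self._steps)}
        except StopIteration:
            return {"state": "success", "progress": 100}

def wait_for_task(task, interval=0.5):
    """Poll until the task reports success, logging progress along the way."""
    started = time.monotonic()
    while True:
        info = task.poll()
        if info["state"] == "success":
            print(f"Task {task.task_id} completed successfully "
                  f"(duration_secs ~ {time.monotonic() - started:.3f})")
            return
        print(f"Task {task.task_id} progress is {info['progress']}%.")
        time.sleep(interval)

wait_for_task(FakeVimTask("task-1116524", [66, 88]), interval=0.01)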
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 832.542080] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116525, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.543859] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 832.543859] env[62109]: value = "task-1116526" [ 832.543859] env[62109]: _type = "Task" [ 832.543859] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.553313] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116526, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.863789] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 832.864104] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 832.864402] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.864662] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 832.864824] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.865041] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 
tempest-MigrationsAdminTest-769684774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 832.865282] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 832.865443] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 832.865617] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 832.865787] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 832.865969] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 832.871575] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4fd4f44-dba7-4f55-988f-df3ea3be2ead {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.897686] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 832.897686] env[62109]: value = "task-1116527" [ 832.897686] env[62109]: _type = "Task" [ 832.897686] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.908031] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116527, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.922609] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.956959] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.726s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.956959] env[62109]: DEBUG nova.compute.manager [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 832.958864] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.560s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.960390] env[62109]: INFO nova.compute.claims [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.022675] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116524, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.034741] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116525, 'name': ReconfigVM_Task, 'duration_secs': 0.290776} completed successfully. 
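The oslo_concurrency.lockutils entries above record how long each caller waited for and then held the shared "compute_resources" lock: one instance_claim released it after holding for 2.726s while the next claim had waited 32.560s to acquire it. A small sketch of that acquire/waited/held bookkeeping around a plain threading.Lock (named_lock is an invented helper, not the oslo implementation):

import contextlib
import threading
import time

_locks = {}
_registry_guard = threading.Lock()

@contextlib.contextmanager
def named_lock(name, caller):
    # Look up (or create) the named lock, then log the waited/held durations
    # in the same style as the lockutils entries above.
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    start = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {time.monotonic() - start:.3f}s')
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - held_from:.3f}s')

with named_lock("compute_resources", "ResourceTracker.update_usage"):
    time.sleep(0.05)  # stand-in for the critical section (updating tracked usage)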
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.035133] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 833.035434] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb78c821-7c8d-4675-9e14-629d80f41721 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.045958] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 833.045958] env[62109]: value = "task-1116528" [ 833.045958] env[62109]: _type = "Task" [ 833.045958] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.056744] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116528, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.060595] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116526, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.365155] env[62109]: DEBUG nova.network.neutron [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Successfully updated port: a06370a9-effe-4205-85fa-bfa658250da0 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 833.411792] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116527, 'name': ReconfigVM_Task, 'duration_secs': 0.208437} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.411948] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance '32cccd30-278c-48b6-8855-5cd76c2da057' progress to 33 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 833.467768] env[62109]: DEBUG nova.compute.utils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 833.471267] env[62109]: DEBUG nova.compute.manager [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 833.471382] env[62109]: DEBUG nova.network.neutron [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 833.522184] env[62109]: DEBUG oslo_vmware.api [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116524, 'name': PowerOnVM_Task, 'duration_secs': 0.767614} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.522184] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 833.522184] env[62109]: DEBUG nova.compute.manager [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 833.522184] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a6edfc-86af-4dc2-b62e-cf83498616bc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.557056] env[62109]: DEBUG nova.policy [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4cac0fc21b44e719f3fbb91cbfeeb20', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd79b6e383494f2bb88bd4a0e388f18d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 833.568145] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116526, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519252} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.568585] env[62109]: DEBUG oslo_vmware.api [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116528, 'name': PowerOnVM_Task, 'duration_secs': 0.501887} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.568970] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 5d656f91-d35f-45e1-8892-7cdacd306960/5d656f91-d35f-45e1-8892-7cdacd306960.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 833.569364] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 833.569769] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 833.572196] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6abd07db-0c4c-4235-ab28-e646955be5d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.576114] env[62109]: DEBUG nova.compute.manager [None req-3bf6eda3-c790-4b7a-a427-7bd2ffe9f9bf tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 833.579018] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90354809-ed6d-4ea9-a08e-4f04800b7d5c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.590104] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 833.590104] env[62109]: value = "task-1116529" [ 833.590104] env[62109]: _type = "Task" [ 833.590104] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.605818] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116529, 'name': ExtendVirtualDisk_Task} progress is 0%. 
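The entries above trace the spawn-from-image-cache path for instance 5d656f91-d35f-45e1-8892-7cdacd306960: the cached image VMDK under devstack-image-cache_base is copied into the instance directory on datastore2, and the root disk is then extended to 1048576 KiB to match the flavor's 1 GB root disk. A simplified sketch of that planning step (datastore_path and plan_root_disk are invented helpers; treating 1048576 as root_gb expressed in KiB is an assumption drawn from the figures in the log):

def datastore_path(datastore, *parts):
    return f"[{datastore}] " + "/".join(parts)

def plan_root_disk(datastore, image_id, instance_uuid, image_size_kb, root_gb):
    cached = datastore_path(datastore, "devstack-image-cache_base", image_id,
                            f"{image_id}.vmdk")
    target = datastore_path(datastore, instance_uuid, f"{instance_uuid}.vmdk")
    steps = [("CopyVirtualDisk_Task", cached, target)]
    requested_kb = root_gb * 1024 * 1024
    if requested_kb > image_size_kb:                 # only grow, never shrink
        steps.append(("ExtendVirtualDisk_Task", target, requested_kb))
    return steps

# 21318656 bytes of image data is 20819 KiB; the m1 flavors here use root_gb=1.
for step in plan_root_disk("datastore2",
                           "6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8",
                           "5d656f91-d35f-45e1-8892-7cdacd306960",
                           image_size_kb=20819, root_gb=1):
    print(step)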
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.868060] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "refresh_cache-7ace6356-1a81-4095-8286-c9b6d829062b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.868060] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquired lock "refresh_cache-7ace6356-1a81-4095-8286-c9b6d829062b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.868699] env[62109]: DEBUG nova.network.neutron [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 833.918390] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:53:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='909148fc-609f-4b07-aa86-4e8ce4ae4b3f',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-757017005',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 833.918650] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 833.918815] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 833.919013] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 833.919178] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 833.919348] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 
tempest-MigrationsAdminTest-769684774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 833.919558] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 833.919720] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 833.919888] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 833.920083] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 833.920297] env[62109]: DEBUG nova.virt.hardware [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 833.926727] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Reconfiguring VM instance instance-00000032 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 833.927155] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-546def04-974b-45f7-9568-8f33e2699649 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.948990] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 833.948990] env[62109]: value = "task-1116530" [ 833.948990] env[62109]: _type = "Task" [ 833.948990] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.958823] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116530, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.973251] env[62109]: DEBUG nova.compute.manager [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 834.049863] env[62109]: DEBUG nova.compute.manager [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Received event network-vif-plugged-a06370a9-effe-4205-85fa-bfa658250da0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 834.050107] env[62109]: DEBUG oslo_concurrency.lockutils [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] Acquiring lock "7ace6356-1a81-4095-8286-c9b6d829062b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.050929] env[62109]: DEBUG oslo_concurrency.lockutils [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] Lock "7ace6356-1a81-4095-8286-c9b6d829062b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.050929] env[62109]: DEBUG oslo_concurrency.lockutils [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] Lock "7ace6356-1a81-4095-8286-c9b6d829062b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.050929] env[62109]: DEBUG nova.compute.manager [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] No waiting events found dispatching network-vif-plugged-a06370a9-effe-4205-85fa-bfa658250da0 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 834.050929] env[62109]: WARNING nova.compute.manager [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Received unexpected event network-vif-plugged-a06370a9-effe-4205-85fa-bfa658250da0 for instance with vm_state building and task_state spawning. [ 834.051086] env[62109]: DEBUG nova.compute.manager [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Received event network-changed-a06370a9-effe-4205-85fa-bfa658250da0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 834.051155] env[62109]: DEBUG nova.compute.manager [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Refreshing instance network info cache due to event network-changed-a06370a9-effe-4205-85fa-bfa658250da0.
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 834.051292] env[62109]: DEBUG oslo_concurrency.lockutils [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] Acquiring lock "refresh_cache-7ace6356-1a81-4095-8286-c9b6d829062b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.053403] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.111130] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116529, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115668} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.111694] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 834.117014] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ba8d12-defd-4fe6-9ee4-61b7539348e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.140127] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] 5d656f91-d35f-45e1-8892-7cdacd306960/5d656f91-d35f-45e1-8892-7cdacd306960.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 834.144039] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d64b40b-1630-4fbc-8975-c88d1a2ac77b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.166163] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 834.166163] env[62109]: value = "task-1116531" [ 834.166163] env[62109]: _type = "Task" [ 834.166163] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.175551] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116531, 'name': ReconfigVM_Task} progress is 5%. 
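The service-nova entries above show the external-event handshake: Neutron reports network-vif-plugged for port a06370a9-effe-4205-85fa-bfa658250da0 while instance 7ace6356-1a81-4095-8286-c9b6d829062b is still spawning, the manager takes the per-instance events lock and looks for a registered waiter, finds none, logs the event as unexpected, and then refreshes the network info cache for the follow-up network-changed event. A toy sketch of the pop-a-waiting-event idea (this InstanceEvents is a simplified stand-in, not Nova's class):

import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}              # (instance_uuid, event_name) -> Event
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock

    def prepare_for_event(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

events = InstanceEvents()
waiter = events.pop_instance_event("7ace6356-1a81-4095-8286-c9b6d829062b",
                                   "network-vif-plugged-a06370a9-effe-4205-85fa-bfa658250da0")
if waiter is None:
    print("No waiting events found; received unexpected event while spawning")
else:
    waiter.set()  # would unblock a thread waiting for the VIF to be plugged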
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.203299] env[62109]: DEBUG nova.network.neutron [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Successfully created port: 4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 834.439886] env[62109]: DEBUG nova.network.neutron [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 834.464453] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116530, 'name': ReconfigVM_Task, 'duration_secs': 0.414231} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.464871] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Reconfigured VM instance instance-00000032 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 834.465708] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f3be5a-a80c-434f-a973-30d5226d720c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.490830] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 32cccd30-278c-48b6-8855-5cd76c2da057/32cccd30-278c-48b6-8855-5cd76c2da057.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 834.503133] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c556374-c830-47a0-aea6-b40f1fab9db8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.527426] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 834.527426] env[62109]: value = "task-1116532" [ 834.527426] env[62109]: _type = "Task" [ 834.527426] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.538852] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116532, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.557467] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4100ee-b029-4e1c-9f79-95a613a36ecb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.566218] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9190fdf1-b793-4137-9ebe-53f02d99fee1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.613512] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12336ba3-fb8d-450f-9bd1-448068a12789 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.619838] env[62109]: INFO nova.compute.manager [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Unrescuing [ 834.620112] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "refresh_cache-af3465db-fd56-458d-a499-14df3a0029f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.620279] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquired lock "refresh_cache-af3465db-fd56-458d-a499-14df3a0029f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.620600] env[62109]: DEBUG nova.network.neutron [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 834.623472] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e104be6c-dcd1-4f7b-8f10-00a3a24555ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.644074] env[62109]: DEBUG nova.compute.provider_tree [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.679560] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116531, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.843234] env[62109]: DEBUG nova.network.neutron [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Updating instance_info_cache with network_info: [{"id": "a06370a9-effe-4205-85fa-bfa658250da0", "address": "fa:16:3e:3d:f3:6c", "network": {"id": "f5e70352-43f1-423d-8e31-44ae247ddba2", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-426993836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cddb4c7a9ba442d98d6cf4f3ab30ad71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa06370a9-ef", "ovs_interfaceid": "a06370a9-effe-4205-85fa-bfa658250da0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.021447] env[62109]: DEBUG nova.compute.manager [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 835.036875] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116532, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.052123] env[62109]: DEBUG nova.virt.hardware [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 835.052329] env[62109]: DEBUG nova.virt.hardware [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 835.052472] env[62109]: DEBUG nova.virt.hardware [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 835.052658] env[62109]: DEBUG nova.virt.hardware [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 835.052821] env[62109]: DEBUG nova.virt.hardware [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 835.052975] env[62109]: DEBUG nova.virt.hardware [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 835.053308] env[62109]: DEBUG nova.virt.hardware [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 835.053531] env[62109]: DEBUG nova.virt.hardware [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 835.053812] env[62109]: DEBUG nova.virt.hardware [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 835.053954] env[62109]: DEBUG nova.virt.hardware [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 835.054153] env[62109]: DEBUG nova.virt.hardware [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 835.054979] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c311bf3d-d2d1-4ac3-b1ff-f1ba2c6a6ca9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.063956] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab9a516-eebe-44de-8147-583684e344e9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.148084] env[62109]: DEBUG nova.scheduler.client.report [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 835.183838] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116531, 'name': ReconfigVM_Task, 'duration_secs': 0.578395} completed successfully. 
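The scheduler report-client entries in this section repeatedly note that inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398: the periodically recomputed VCPU/MEMORY_MB/DISK_GB figures equal the cached copy, so no update is sent to Placement. A minimal sketch of that compare-before-update check (set_inventory_for_provider here is a local stand-in; the real client talks to the Placement HTTP API):

CACHED_INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 169,
                "step_size": 1, "allocation_ratio": 1.0},
}

def set_inventory_for_provider(provider_uuid, new_inventory, cached=CACHED_INVENTORY):
    # Push an update only when the freshly computed inventory differs.
    if new_inventory == cached:
        print(f"Inventory has not changed for provider {provider_uuid}")
        return False
    print(f"Updating inventory for provider {provider_uuid}")  # placeholder for the PUT
    return True

set_inventory_for_provider("574e9717-c25e-453d-8028-45d9e2f95398",
                           {k: dict(v) for k, v in CACHED_INVENTORY.items()})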
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.188259] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Reconfigured VM instance instance-0000003e to attach disk [datastore2] 5d656f91-d35f-45e1-8892-7cdacd306960/5d656f91-d35f-45e1-8892-7cdacd306960.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 835.189554] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a3c260e4-508f-48f9-93be-b02f84c3249a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.196501] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 835.196501] env[62109]: value = "task-1116533" [ 835.196501] env[62109]: _type = "Task" [ 835.196501] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.206405] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116533, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.349388] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Releasing lock "refresh_cache-7ace6356-1a81-4095-8286-c9b6d829062b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.349388] env[62109]: DEBUG nova.compute.manager [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Instance network_info: |[{"id": "a06370a9-effe-4205-85fa-bfa658250da0", "address": "fa:16:3e:3d:f3:6c", "network": {"id": "f5e70352-43f1-423d-8e31-44ae247ddba2", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-426993836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cddb4c7a9ba442d98d6cf4f3ab30ad71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa06370a9-ef", "ovs_interfaceid": "a06370a9-effe-4205-85fa-bfa658250da0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 835.349556] env[62109]: DEBUG oslo_concurrency.lockutils [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] Acquired lock "refresh_cache-7ace6356-1a81-4095-8286-c9b6d829062b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.349556] env[62109]: DEBUG nova.network.neutron [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Refreshing network info cache for port a06370a9-effe-4205-85fa-bfa658250da0 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 835.349556] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:f3:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0dd3c126-9d86-4f9a-b81c-e9627c7a5401', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a06370a9-effe-4205-85fa-bfa658250da0', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 835.359718] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Creating folder: Project (cddb4c7a9ba442d98d6cf4f3ab30ad71). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 835.360611] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90bda184-bc4c-43c9-83e5-7972c3a31231 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.374043] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Created folder: Project (cddb4c7a9ba442d98d6cf4f3ab30ad71) in parent group-v244329. [ 835.374732] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Creating folder: Instances. Parent ref: group-v244448. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 835.375373] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c17cce0-1845-45da-9aaf-77e172ec706c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.392536] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Created folder: Instances in parent group-v244448. [ 835.392536] env[62109]: DEBUG oslo.service.loopingcall [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
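The two Folder.CreateFolder calls above create a per-project folder and then an "Instances" folder inside it. The sketch below shows the general create-or-reuse pattern for such a nested path; find_child and create_child are hypothetical callables standing in for the vCenter operations, not real vSphere SDK signatures.

# Hypothetical helpers: find_child(parent, name) -> folder or None,
# create_child(parent, name) -> folder. The loop mirrors the
# "Project (<tenant id>)" then "Instances" folder creation seen above.
def ensure_folder_path(root, names, find_child, create_child):
    parent = root
    for name in names:
        existing = find_child(parent, name)
        parent = existing if existing is not None else create_child(parent, name)
    return parent

# Toy in-memory model so the sketch runs standalone.
tree = {}
find = lambda parent, name: parent.get(name)
create = lambda parent, name: parent.setdefault(name, {})
ensure_folder_path(tree, ["Project (cddb4c7a9ba442d98d6cf4f3ab30ad71)", "Instances"], find, create)
print(tree)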
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 835.392536] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 835.393468] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8b33552-af25-4642-8394-fcaa064d8345 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.416922] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "c694c178-3894-4997-8e99-8f4900a64848" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.417230] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "c694c178-3894-4997-8e99-8f4900a64848" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.417634] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "c694c178-3894-4997-8e99-8f4900a64848-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.418022] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "c694c178-3894-4997-8e99-8f4900a64848-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.418227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "c694c178-3894-4997-8e99-8f4900a64848-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.420752] env[62109]: INFO nova.compute.manager [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Terminating instance [ 835.423436] env[62109]: DEBUG nova.compute.manager [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Start destroying the instance on the hypervisor. 
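The lockutils records above show the terminate path serializing on a lock named after the instance UUID and briefly taking a second "<uuid>-events" lock while clearing queued external events. The snippet below is a simplified illustration of that naming pattern using oslo.concurrency's lock context manager; the real ComputeManager wires this up through decorators and helper methods.

from oslo_concurrency import lockutils

INSTANCE_UUID = "c694c178-3894-4997-8e99-8f4900a64848"

def do_terminate_instance(instance_uuid):
    # Per-instance lock, as in 'Lock "<uuid>" acquired by ... do_terminate_instance'.
    with lockutils.lock(instance_uuid):
        # Nested, short-lived lock for the instance's external-event bookkeeping.
        with lockutils.lock(f"{instance_uuid}-events"):
            pass  # pending events for the instance would be cleared here
        print(f"Terminating instance {instance_uuid}")

do_terminate_instance(INSTANCE_UUID)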
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 835.423615] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 835.424528] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d559816-960e-40a0-b2d7-bca13e561726 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.429824] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 835.429824] env[62109]: value = "task-1116536" [ 835.429824] env[62109]: _type = "Task" [ 835.429824] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.436185] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 835.436864] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1dc8b4af-a9cb-4041-9bd0-59876f7ec1f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.443020] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116536, 'name': CreateVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.446745] env[62109]: DEBUG oslo_vmware.api [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 835.446745] env[62109]: value = "task-1116537" [ 835.446745] env[62109]: _type = "Task" [ 835.446745] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.457765] env[62109]: DEBUG oslo_vmware.api [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116537, 'name': PowerOffVM_Task} progress is 0%. 
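Several records above follow the same shape: submit a vCenter task (CreateVM_Task, PowerOffVM_Task), then wait for it while logging "progress is N%". The loop below illustrates that poll-until-done pattern in isolation; get_task_info is a hypothetical callable, not the oslo.vmware API.

import time

def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
    """Poll a task until it succeeds, fails, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)   # e.g. {'state': 'running', 'progress': 5}
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        # A driver would log "Task: {...} progress is N%" here before sleeping.
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

# Example with a fake task source that succeeds on the third poll.
states = iter([{"state": "running", "progress": 0},
               {"state": "running", "progress": 66},
               {"state": "success", "progress": 100}])
print(wait_for_task(lambda _tid: next(states), "task-1116536", interval=0.01))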
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.491663] env[62109]: DEBUG nova.network.neutron [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Updating instance_info_cache with network_info: [{"id": "a33eb191-a49c-4a63-8f1a-569b4fcbc346", "address": "fa:16:3e:6c:a1:54", "network": {"id": "41fbc2a0-d530-4b40-ba33-39d1ea6d6046", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1834365577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "45ad1c039aa9463e977cf986ce4dccf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa33eb191-a4", "ovs_interfaceid": "a33eb191-a49c-4a63-8f1a-569b4fcbc346", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.540874] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116532, 'name': ReconfigVM_Task, 'duration_secs': 0.751782} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.542106] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 32cccd30-278c-48b6-8855-5cd76c2da057/32cccd30-278c-48b6-8855-5cd76c2da057.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 835.542106] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance '32cccd30-278c-48b6-8855-5cd76c2da057' progress to 50 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 835.657168] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.696s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.657168] env[62109]: DEBUG nova.compute.manager [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 835.663030] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.330s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.713148] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116533, 'name': Rename_Task, 'duration_secs': 0.235105} completed successfully. 
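Each lockutils record above reports how long the caller waited for the lock and how long it was held (for example "waited 34.330s" and "held 2.696s"). The context manager below reproduces that bookkeeping in a few lines; it is a standalone illustration, not the oslo.concurrency code.

import threading
import time
from contextlib import contextmanager

@contextmanager
def timed_lock(lock, name):
    start = time.monotonic()
    with lock:
        print(f'Lock "{name}" acquired :: waited {time.monotonic() - start:.3f}s')
        held_from = time.monotonic()
        try:
            yield
        finally:
            print(f'Lock "{name}" "released" :: held {time.monotonic() - held_from:.3f}s')

with timed_lock(threading.Lock(), "compute_resources"):
    time.sleep(0.01)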
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.716705] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 835.721090] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b8d0437-8008-40e5-9bf5-180e25bde430 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.726227] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 835.726227] env[62109]: value = "task-1116538" [ 835.726227] env[62109]: _type = "Task" [ 835.726227] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.740110] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116538, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.944353] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116536, 'name': CreateVM_Task, 'duration_secs': 0.446461} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.944659] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 835.945437] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.945642] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.947442] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 835.947442] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fa111af-252b-4be2-9237-1f793603ce8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.956619] env[62109]: 
DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 835.956619] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f32275-1dca-fa6a-b014-6bcf044ae3ee" [ 835.956619] env[62109]: _type = "Task" [ 835.956619] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.962137] env[62109]: DEBUG oslo_vmware.api [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116537, 'name': PowerOffVM_Task, 'duration_secs': 0.202186} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.972345] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 835.972345] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 835.972345] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee334004-a9c4-4ebf-9521-4f3d48da8e8d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.981818] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f32275-1dca-fa6a-b014-6bcf044ae3ee, 'name': SearchDatastore_Task, 'duration_secs': 0.010907} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.981818] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.981818] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 835.982185] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.982185] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.982987] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 835.982987] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ee45a80-6e1c-42a7-8647-c350f17efb98 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.995281] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Releasing lock "refresh_cache-af3465db-fd56-458d-a499-14df3a0029f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.995964] env[62109]: DEBUG nova.objects.instance [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lazy-loading 'flavor' on Instance uuid af3465db-fd56-458d-a499-14df3a0029f0 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 835.999109] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 835.999315] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None 
req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 836.000686] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acd375a6-1ff3-4c9f-b993-b15a5dff9568 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.009041] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 836.009041] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525c7fe0-55bb-b090-1d67-95b1dd8cd481" [ 836.009041] env[62109]: _type = "Task" [ 836.009041] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.024688] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525c7fe0-55bb-b090-1d67-95b1dd8cd481, 'name': SearchDatastore_Task, 'duration_secs': 0.011978} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.036570] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7756bd45-d223-45d8-ba2b-2b05d6860b7b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.044897] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 836.045217] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 836.045470] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleting the datastore file [datastore2] c694c178-3894-4997-8e99-8f4900a64848 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 836.046071] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c3d3ea5-f65e-422e-884b-06ddae7fb9f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.056867] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 836.056867] env[62109]: value = 
"session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ff97ae-2517-86b0-21d6-f3317c617cf2" [ 836.056867] env[62109]: _type = "Task" [ 836.056867] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.060709] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b6ffa0-a55c-4601-bafd-6c80e6508be9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.064253] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "5bea4229-6182-445e-b569-e7413ce92b93" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.064560] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "5bea4229-6182-445e-b569-e7413ce92b93" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.075448] env[62109]: DEBUG oslo_vmware.api [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 836.075448] env[62109]: value = "task-1116540" [ 836.075448] env[62109]: _type = "Task" [ 836.075448] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.104065] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e578017-5a7a-4a1c-972d-8963b1a1a962 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.107769] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ff97ae-2517-86b0-21d6-f3317c617cf2, 'name': SearchDatastore_Task, 'duration_secs': 0.009813} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.108993] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.109203] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 7ace6356-1a81-4095-8286-c9b6d829062b/7ace6356-1a81-4095-8286-c9b6d829062b.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 836.117923] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39308911-8015-4244-be83-fae429d293d9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.119556] env[62109]: DEBUG oslo_vmware.api [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116540, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.139072] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance '32cccd30-278c-48b6-8855-5cd76c2da057' progress to 67 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 836.154316] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 836.154316] env[62109]: value = "task-1116541" [ 836.154316] env[62109]: _type = "Task" [ 836.154316] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.161332] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116541, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.163602] env[62109]: DEBUG oslo_vmware.rw_handles [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52db19dd-3f74-5745-0773-51d039117e69/disk-0.vmdk. 
{{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 836.168394] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994a2e21-f7de-4790-a570-093bd8d872f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.173417] env[62109]: DEBUG nova.compute.utils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 836.175610] env[62109]: DEBUG nova.compute.manager [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 836.175610] env[62109]: DEBUG nova.network.neutron [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 836.183197] env[62109]: DEBUG oslo_vmware.rw_handles [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52db19dd-3f74-5745-0773-51d039117e69/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 836.183571] env[62109]: ERROR oslo_vmware.rw_handles [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52db19dd-3f74-5745-0773-51d039117e69/disk-0.vmdk due to incomplete transfer. [ 836.183743] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e62a3857-8144-40a2-940b-346c5f0cb01f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.191701] env[62109]: DEBUG oslo_vmware.rw_handles [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52db19dd-3f74-5745-0773-51d039117e69/disk-0.vmdk. 
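The rw_handles records above show the read handle being closed while the NFC lease is still "ready" and the transfer incomplete, so the lease is aborted rather than completed. The function below is only a minimal sketch of that decision, with hypothetical complete_lease/abort_lease callables.

def close_transfer(url, bytes_read, expected_bytes, complete_lease, abort_lease):
    """Finish or abort an export lease depending on how much data was read."""
    if bytes_read < expected_bytes:
        print(f"Aborting lease for {url} due to incomplete transfer.")
        abort_lease()
    else:
        complete_lease()
    print(f"Closed VMDK read handle for {url}.")

close_transfer(
    "https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52db19dd-3f74-5745-0773-51d039117e69/disk-0.vmdk",
    bytes_read=10, expected_bytes=100,
    complete_lease=lambda: None, abort_lease=lambda: None)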
{{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 836.191931] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Uploaded image 871af3c4-4325-4264-8f1d-6cfc2a52477d to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 836.194652] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 836.198870] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f529d802-f4b7-40fc-a3ce-45196123a78e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.212078] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 836.212078] env[62109]: value = "task-1116542" [ 836.212078] env[62109]: _type = "Task" [ 836.212078] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.228023] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116542, 'name': Destroy_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.242763] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116538, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.293287] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36f7bbc-1f83-41fe-8552-2789dea352df {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.302501] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b892bd26-62d6-444a-9a9d-e35b562cef6c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.343963] env[62109]: DEBUG nova.policy [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4cac0fc21b44e719f3fbb91cbfeeb20', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd79b6e383494f2bb88bd4a0e388f18d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 836.349730] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef63362-21e5-492f-b7d6-18eabcb3aefc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.353445] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "66bbe1e6-e5ee-46a0-b95c-449eef636509" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.353798] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "66bbe1e6-e5ee-46a0-b95c-449eef636509" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.367278] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccc3aa1-0a4f-4fce-b919-1edcde3f5c23 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.389881] env[62109]: DEBUG nova.compute.provider_tree [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.507521] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febdcf52-cd32-4259-8afe-0d426d6166cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
836.512213] env[62109]: DEBUG nova.network.neutron [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Updated VIF entry in instance network info cache for port a06370a9-effe-4205-85fa-bfa658250da0. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 836.512318] env[62109]: DEBUG nova.network.neutron [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Updating instance_info_cache with network_info: [{"id": "a06370a9-effe-4205-85fa-bfa658250da0", "address": "fa:16:3e:3d:f3:6c", "network": {"id": "f5e70352-43f1-423d-8e31-44ae247ddba2", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-426993836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cddb4c7a9ba442d98d6cf4f3ab30ad71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa06370a9-ef", "ovs_interfaceid": "a06370a9-effe-4205-85fa-bfa658250da0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.536967] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 836.538239] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4e48d41-71a2-4ad6-b7a8-1d3ffabe44a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.547772] env[62109]: DEBUG oslo_vmware.api [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 836.547772] env[62109]: value = "task-1116543" [ 836.547772] env[62109]: _type = "Task" [ 836.547772] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.560660] env[62109]: DEBUG oslo_vmware.api [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116543, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.568391] env[62109]: DEBUG nova.compute.manager [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 836.591290] env[62109]: DEBUG oslo_vmware.api [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116540, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197058} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.591290] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.591290] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 836.591290] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 836.591290] env[62109]: INFO nova.compute.manager [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Took 1.17 seconds to destroy the instance on the hypervisor. [ 836.591459] env[62109]: DEBUG oslo.service.loopingcall [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 836.591459] env[62109]: DEBUG nova.compute.manager [-] [instance: c694c178-3894-4997-8e99-8f4900a64848] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 836.591459] env[62109]: DEBUG nova.network.neutron [-] [instance: c694c178-3894-4997-8e99-8f4900a64848] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 836.668351] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116541, 'name': CopyVirtualDisk_Task} progress is 77%. 
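After the instance is destroyed on the hypervisor, the manager above waits on _deallocate_network_with_retries through a looping call, i.e. the Neutron cleanup is retried until it succeeds or the retries run out. A plain-Python sketch of that retry loop follows; the real code uses oslo.service's looping call, and the names here are illustrative.

import time

def call_with_retries(func, attempts=3, interval=0.5):
    """Retry func until it returns without raising, or attempts run out."""
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except Exception as exc:   # the real code narrows this to known errors
            if attempt == attempts:
                raise
            print(f"Deallocate attempt {attempt} failed ({exc}); retrying")
            time.sleep(interval)

calls = {"n": 0}
def deallocate_for_instance():
    calls["n"] += 1
    if calls["n"] < 2:
        raise RuntimeError("neutron temporarily unavailable")
    return "deallocated"

print(call_with_retries(deallocate_for_instance, interval=0.01))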
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.678940] env[62109]: DEBUG nova.compute.manager [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 836.727694] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116542, 'name': Destroy_Task, 'duration_secs': 0.485714} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.727694] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Destroyed the VM [ 836.727694] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 836.727694] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c3ff3809-2e4f-4247-80f4-603257754498 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.738112] env[62109]: DEBUG oslo_vmware.api [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116538, 'name': PowerOnVM_Task, 'duration_secs': 0.733442} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.739607] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 836.740467] env[62109]: INFO nova.compute.manager [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Took 9.83 seconds to spawn the instance on the hypervisor. 
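Reading the records for instance 8b63f9a1 together: the stream-optimized VMDK is uploaded to Glance, the temporary VM used for the export is destroyed, and finally the snapshot taken on the source instance is removed. The outline below merely restates that ordering with hypothetical step callables; it is not Nova's snapshot code.

def finish_snapshot_upload(upload_to_glance, destroy_export_vm, remove_snapshot):
    """Hypothetical step callables; the order mirrors the log above."""
    image_id = upload_to_glance()          # "Uploaded image ... to the Glance image server"
    destroy_export_vm()                    # "Destroying the VM" / "Destroyed the VM"
    remove_snapshot()                      # "Deleting Snapshot of the VM instance"
    return image_id

print(finish_snapshot_upload(
    upload_to_glance=lambda: "871af3c4-4325-4264-8f1d-6cfc2a52477d",
    destroy_export_vm=lambda: None,
    remove_snapshot=lambda: None))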
[ 836.740467] env[62109]: DEBUG nova.compute.manager [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 836.740467] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 836.740467] env[62109]: value = "task-1116544" [ 836.740467] env[62109]: _type = "Task" [ 836.740467] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.741210] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0d159e-031b-4a67-9f6c-301bd71957a1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.755776] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Acquiring lock "9b2968bb-ed06-4740-b43e-b4aa1fac76dd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.756032] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Lock "9b2968bb-ed06-4740-b43e-b4aa1fac76dd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.756491] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Acquiring lock "9b2968bb-ed06-4740-b43e-b4aa1fac76dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.756491] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Lock "9b2968bb-ed06-4740-b43e-b4aa1fac76dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.756665] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Lock "9b2968bb-ed06-4740-b43e-b4aa1fac76dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.759629] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 
tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116544, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.762037] env[62109]: INFO nova.compute.manager [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Terminating instance [ 836.764028] env[62109]: DEBUG nova.compute.manager [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 836.764236] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 836.765282] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a273cb1-b32a-4cf1-9558-b6455b702428 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.768423] env[62109]: DEBUG nova.network.neutron [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Port de4056dc-a527-43f0-ad81-f82e5cb00f86 binding to destination host cpu-1 is already ACTIVE {{(pid=62109) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 836.776671] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 836.776671] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc417111-eff4-4c4c-b0eb-8ca59d6076e9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.783028] env[62109]: DEBUG oslo_vmware.api [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Waiting for the task: (returnval){ [ 836.783028] env[62109]: value = "task-1116545" [ 836.783028] env[62109]: _type = "Task" [ 836.783028] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.785839] env[62109]: DEBUG nova.network.neutron [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Successfully updated port: 4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 836.797991] env[62109]: DEBUG oslo_vmware.api [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116545, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.859723] env[62109]: DEBUG nova.compute.manager [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 836.898715] env[62109]: DEBUG nova.scheduler.client.report [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 836.903437] env[62109]: DEBUG oslo_concurrency.lockutils [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.239s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.903854] env[62109]: INFO nova.compute.manager [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Successfully reverted task state from None on failure for instance. 
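Annotation: the lockutils DEBUG lines above ("Acquiring lock ... acquired ... waited 0.000s ... released ... held 1.239s") all come from one acquire/wait/hold bookkeeping pattern around a named lock. The sketch below is a simplified, hypothetical stand-in for that pattern, not the oslo.concurrency implementation; traced_lock and the module-level _locks dict are invented names used only for illustration.

# Illustrative only: a minimal stand-in for the "waited/held" lock tracing
# seen in the lockutils DEBUG records above. NOT the oslo.concurrency code.
import logging
import threading
import time
from contextlib import contextmanager

LOG = logging.getLogger(__name__)
_locks: dict[str, threading.Lock] = {}  # one process-local lock per name

@contextmanager
def traced_lock(name: str, caller: str):
    lock = _locks.setdefault(name, threading.Lock())
    LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, caller, waited)
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs', name, caller, held)

# Usage sketch (hypothetical caller name):
# with traced_lock("compute_resources", "ResourceTracker.instance_claim"):
#     ...claim resources...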
[ 836.907969] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.905s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.909714] env[62109]: INFO nova.compute.claims [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 836.913880] env[62109]: ERROR oslo_messaging.rpc.server [None req-502e08df-ddfe-4ff7-b552-54793e14d90a tempest-ServersAaction247Test-2105068673 tempest-ServersAaction247Test-2105068673-project-member] Exception during message handling: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 574e9717-c25e-453d-8028-45d9e2f95398 (generation 76): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-21622ef4-3e12-4c0c-9572-2fec11d0248c"}]} [ 836.913880] env[62109]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 836.913880] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 836.913880] env[62109]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 836.913880] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 836.913880] env[62109]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 836.913880] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 836.913880] env[62109]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 836.913880] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 836.913880] env[62109]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 836.913880] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.913880] env[62109]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server raise self.value [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 836.914239] env[62109]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server raise self.value [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.914239] env[62109]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server raise self.value [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3353, in terminate_instance [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3348, in do_terminate_instance [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server raise self.value [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3305, in _delete_instance [ 836.914706] env[62109]: ERROR oslo_messaging.rpc.server 
self._complete_deletion(context, instance) [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 926, in _complete_deletion [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 693, in _update_resource_tracker [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 702, in update_usage [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server raise attempt.get() [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 719, in reraise [ 836.915203] env[62109]: ERROR oslo_messaging.rpc.server raise value [ 836.915671] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 836.915671] env[62109]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 836.915671] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 836.915671] env[62109]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 836.915671] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1498, in update_from_provider_tree [ 836.915671] env[62109]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 836.915671] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1003, in set_inventory_for_provider [ 836.915671] env[62109]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateConflict( [ 836.915671] env[62109]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 574e9717-c25e-453d-8028-45d9e2f95398 (generation 76): {"errors": [{"status": 409, "title": 
"Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-21622ef4-3e12-4c0c-9572-2fec11d0248c"}]} [ 836.915671] env[62109]: ERROR oslo_messaging.rpc.server [ 836.936281] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "ac068268-1243-466e-8cd5-1ee2bc248ecd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.936781] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "ac068268-1243-466e-8cd5-1ee2bc248ecd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.957038] env[62109]: DEBUG nova.compute.manager [req-2e90eae4-ff79-40d2-a0ff-745413295be6 req-82dbb220-0a7f-4ffa-9f27-88c0eea60e1e service nova] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Received event network-vif-plugged-4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 836.957560] env[62109]: DEBUG oslo_concurrency.lockutils [req-2e90eae4-ff79-40d2-a0ff-745413295be6 req-82dbb220-0a7f-4ffa-9f27-88c0eea60e1e service nova] Acquiring lock "3da7aca9-5d65-4f5e-b0a3-7cf5308f0384-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.957560] env[62109]: DEBUG oslo_concurrency.lockutils [req-2e90eae4-ff79-40d2-a0ff-745413295be6 req-82dbb220-0a7f-4ffa-9f27-88c0eea60e1e service nova] Lock "3da7aca9-5d65-4f5e-b0a3-7cf5308f0384-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.957737] env[62109]: DEBUG oslo_concurrency.lockutils [req-2e90eae4-ff79-40d2-a0ff-745413295be6 req-82dbb220-0a7f-4ffa-9f27-88c0eea60e1e service nova] Lock "3da7aca9-5d65-4f5e-b0a3-7cf5308f0384-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.957917] env[62109]: DEBUG nova.compute.manager [req-2e90eae4-ff79-40d2-a0ff-745413295be6 req-82dbb220-0a7f-4ffa-9f27-88c0eea60e1e service nova] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] No waiting events found dispatching network-vif-plugged-4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 836.958112] env[62109]: WARNING nova.compute.manager [req-2e90eae4-ff79-40d2-a0ff-745413295be6 req-82dbb220-0a7f-4ffa-9f27-88c0eea60e1e service nova] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Received unexpected event network-vif-plugged-4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6 for instance with vm_state building and task_state spawning. 
[ 837.040123] env[62109]: DEBUG oslo_concurrency.lockutils [req-f34f8a51-31c4-47c0-9b91-0c7d7fa2f614 req-6da7fa66-7f45-43f2-89e1-d3bd9ed8c898 service nova] Releasing lock "refresh_cache-7ace6356-1a81-4095-8286-c9b6d829062b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.061372] env[62109]: DEBUG oslo_vmware.api [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116543, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.094752] env[62109]: DEBUG nova.network.neutron [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Successfully created port: 7448d73b-2d36-46d9-9f1f-3ed3ede34226 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.110972] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.164018] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550473} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.164510] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 7ace6356-1a81-4095-8286-c9b6d829062b/7ace6356-1a81-4095-8286-c9b6d829062b.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 837.164819] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 837.165154] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d80b2d24-0e21-4ec3-8097-b69c10b2d75e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.172734] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 837.172734] env[62109]: value = "task-1116546" [ 837.172734] env[62109]: _type = "Task" [ 837.172734] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.183129] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116546, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.257274] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116544, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.267227] env[62109]: INFO nova.compute.manager [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Took 52.24 seconds to build instance. [ 837.298675] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "refresh_cache-3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.298854] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "refresh_cache-3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.299016] env[62109]: DEBUG nova.network.neutron [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 837.301142] env[62109]: DEBUG oslo_vmware.api [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116545, 'name': PowerOffVM_Task, 'duration_secs': 0.463751} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.301142] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 837.301142] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 837.301142] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8651fea-ed70-48ca-b0b7-66e7b91f8497 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.382824] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 837.382969] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 837.383153] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Deleting the datastore file [datastore2] 9b2968bb-ed06-4740-b43e-b4aa1fac76dd {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 837.383378] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e11a75a-95eb-4b2c-94b4-9ca7e9615ea8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.391486] env[62109]: DEBUG oslo_vmware.api [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Waiting for the task: (returnval){ [ 837.391486] env[62109]: value = "task-1116548" [ 837.391486] env[62109]: _type = "Task" [ 837.391486] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.402732] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.409030] env[62109]: DEBUG oslo_vmware.api [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116548, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.557960] env[62109]: DEBUG oslo_vmware.api [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116543, 'name': PowerOffVM_Task, 'duration_secs': 0.564221} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.558839] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 837.564299] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Reconfiguring VM instance instance-0000003d to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 837.564980] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1184b50-180d-4103-8cd8-9a55b95f43b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.584222] env[62109]: DEBUG oslo_vmware.api [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 837.584222] env[62109]: value = "task-1116549" [ 837.584222] env[62109]: _type = "Task" [ 837.584222] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.593673] env[62109]: DEBUG oslo_vmware.api [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116549, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.686032] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116546, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082828} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.686032] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 837.686677] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e78b11a-413a-46f4-9cc3-dd32306cba16 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.692099] env[62109]: DEBUG nova.compute.manager [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 837.721859] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 7ace6356-1a81-4095-8286-c9b6d829062b/7ace6356-1a81-4095-8286-c9b6d829062b.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.722238] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1d6ee0a-4088-414d-8416-d8a5a0791933 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.748457] env[62109]: DEBUG nova.virt.hardware [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 837.748711] env[62109]: DEBUG nova.virt.hardware [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 837.748994] env[62109]: DEBUG nova.virt.hardware [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Image 
limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.749062] env[62109]: DEBUG nova.virt.hardware [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 837.749212] env[62109]: DEBUG nova.virt.hardware [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.749367] env[62109]: DEBUG nova.virt.hardware [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 837.749577] env[62109]: DEBUG nova.virt.hardware [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 837.749766] env[62109]: DEBUG nova.virt.hardware [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 837.750034] env[62109]: DEBUG nova.virt.hardware [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 837.750203] env[62109]: DEBUG nova.virt.hardware [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 837.750420] env[62109]: DEBUG nova.virt.hardware [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 837.755396] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335c659c-34c5-4773-9545-300338efd5ee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.758127] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 837.758127] env[62109]: value = "task-1116550" [ 837.758127] env[62109]: _type = "Task" [ 837.758127] 
env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.767252] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116544, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.768995] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8093876-4e02-4344-aeb1-abd83399be08 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.776681] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e6294788-1171-4219-b54c-57e4b163400a tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5d656f91-d35f-45e1-8892-7cdacd306960" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.979s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.785233] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116550, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.795267] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "32cccd30-278c-48b6-8855-5cd76c2da057-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.795267] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "32cccd30-278c-48b6-8855-5cd76c2da057-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.795267] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "32cccd30-278c-48b6-8855-5cd76c2da057-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.862957] env[62109]: DEBUG nova.network.neutron [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 837.885737] env[62109]: DEBUG nova.network.neutron [-] [instance: c694c178-3894-4997-8e99-8f4900a64848] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.903987] env[62109]: DEBUG oslo_vmware.api [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Task: {'id': task-1116548, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.424523} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.905358] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 837.905617] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 837.905838] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 837.909134] env[62109]: INFO nova.compute.manager [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Took 1.14 seconds to destroy the instance on the hypervisor. [ 837.909134] env[62109]: DEBUG oslo.service.loopingcall [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.909134] env[62109]: DEBUG nova.compute.manager [-] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 837.909134] env[62109]: DEBUG nova.network.neutron [-] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 838.098757] env[62109]: DEBUG oslo_vmware.api [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116549, 'name': ReconfigVM_Task, 'duration_secs': 0.364936} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.099088] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Reconfigured VM instance instance-0000003d to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 838.099249] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 838.099515] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee21f77e-76ab-42d1-b5a1-7518d86a2de9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.106355] env[62109]: DEBUG oslo_vmware.api [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 838.106355] env[62109]: value = "task-1116551" [ 838.106355] env[62109]: _type = "Task" [ 838.106355] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.116336] env[62109]: DEBUG oslo_vmware.api [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116551, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.164312] env[62109]: DEBUG nova.network.neutron [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Updating instance_info_cache with network_info: [{"id": "4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6", "address": "fa:16:3e:f8:8b:04", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c8bdd11-0b", "ovs_interfaceid": "4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.259837] env[62109]: DEBUG oslo_vmware.api [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116544, 'name': RemoveSnapshot_Task, 'duration_secs': 1.427923} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.264869] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 838.265012] env[62109]: INFO nova.compute.manager [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Took 17.13 seconds to snapshot the instance on the hypervisor. [ 838.280913] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116550, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.290929] env[62109]: DEBUG nova.compute.manager [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 838.388585] env[62109]: INFO nova.compute.manager [-] [instance: c694c178-3894-4997-8e99-8f4900a64848] Took 1.80 seconds to deallocate network for instance. [ 838.457931] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa71646-b6cb-46ac-a3e4-89c4c0afe606 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.469414] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20888a10-ef8b-4b38-9e01-cb7c2077a789 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.505452] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ebdf3f-2b13-4858-a1cf-f5548fb680ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.513728] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb716a3d-2153-41ba-8492-32e276830674 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.530717] env[62109]: DEBUG nova.compute.provider_tree [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.619334] env[62109]: DEBUG oslo_vmware.api [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116551, 'name': PowerOnVM_Task, 'duration_secs': 0.442555} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.619554] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 838.620025] env[62109]: DEBUG nova.compute.manager [None req-c59c7ef0-10f7-4383-9f6e-1e5902081ac2 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 838.620652] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d472a616-2d8f-4fec-a243-dac5ab17d29f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.669509] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "refresh_cache-3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.669509] env[62109]: DEBUG nova.compute.manager [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Instance network_info: |[{"id": "4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6", "address": "fa:16:3e:f8:8b:04", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c8bdd11-0b", "ovs_interfaceid": "4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 838.670762] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:8b:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0d7a2b2f-3b49-4dc8-9096-af16144b27a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.679519] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Creating folder: Project (fd79b6e383494f2bb88bd4a0e388f18d). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 838.681057] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88ce525a-b1a1-4247-8cdd-0193790607be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.696720] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Created folder: Project (fd79b6e383494f2bb88bd4a0e388f18d) in parent group-v244329. [ 838.697090] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Creating folder: Instances. Parent ref: group-v244451. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 838.697178] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-55a2a2e8-bd04-4b17-a968-e019f1d082a3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.708368] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Created folder: Instances in parent group-v244451. [ 838.708646] env[62109]: DEBUG oslo.service.loopingcall [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.708899] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 838.709974] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42060a33-9c42-4cba-b926-c6f3f205e93b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.736030] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.736030] env[62109]: value = "task-1116554" [ 838.736030] env[62109]: _type = "Task" [ 838.736030] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.744664] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116554, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.774519] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116550, 'name': ReconfigVM_Task, 'duration_secs': 0.724281} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.774868] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 7ace6356-1a81-4095-8286-c9b6d829062b/7ace6356-1a81-4095-8286-c9b6d829062b.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 838.778156] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38bd098c-ff28-4707-855f-87202e3d3342 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.784984] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 838.784984] env[62109]: value = "task-1116555" [ 838.784984] env[62109]: _type = "Task" [ 838.784984] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.798046] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116555, 'name': Rename_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.822084] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.825998] env[62109]: DEBUG nova.compute.manager [None req-74375a3d-1719-4a91-9764-c6510aa7bff3 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Found 1 images (rotation: 2) {{(pid=62109) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 838.902761] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.918176] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.918448] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquired lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.918633] env[62109]: DEBUG nova.network.neutron [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 839.035613] env[62109]: DEBUG nova.scheduler.client.report [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 839.080029] env[62109]: DEBUG nova.network.neutron [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Successfully updated port: 7448d73b-2d36-46d9-9f1f-3ed3ede34226 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.188631] 
env[62109]: DEBUG oslo_concurrency.lockutils [None req-8f40f910-445a-4087-83ae-c6f47e7db053 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "5d656f91-d35f-45e1-8892-7cdacd306960" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.189975] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8f40f910-445a-4087-83ae-c6f47e7db053 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5d656f91-d35f-45e1-8892-7cdacd306960" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.189975] env[62109]: DEBUG nova.compute.manager [None req-8f40f910-445a-4087-83ae-c6f47e7db053 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 839.192052] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49284370-541a-4260-b295-2c0a4d821316 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.201078] env[62109]: DEBUG nova.compute.manager [None req-8f40f910-445a-4087-83ae-c6f47e7db053 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62109) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 839.201426] env[62109]: DEBUG nova.objects.instance [None req-8f40f910-445a-4087-83ae-c6f47e7db053 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lazy-loading 'flavor' on Instance uuid 5d656f91-d35f-45e1-8892-7cdacd306960 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 839.245413] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116554, 'name': CreateVM_Task, 'duration_secs': 0.393894} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.245953] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 839.246276] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.246439] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.246793] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 839.247071] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c30c0ac-6cdd-424e-9af2-091722c189ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.252730] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 839.252730] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5224df0b-9731-0c77-7a95-fa5f513e06ba" [ 839.252730] env[62109]: _type = "Task" [ 839.252730] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.261366] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5224df0b-9731-0c77-7a95-fa5f513e06ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.297940] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116555, 'name': Rename_Task, 'duration_secs': 0.179815} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.299018] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 839.299018] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-051447e2-c92e-48b4-b533-949a15fe1e9c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.304805] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 839.304805] env[62109]: value = "task-1116556" [ 839.304805] env[62109]: _type = "Task" [ 839.304805] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.313081] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116556, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.325385] env[62109]: DEBUG nova.network.neutron [-] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.543485] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.635s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.544074] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 839.548826] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.519s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.549894] env[62109]: INFO nova.compute.claims [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 839.582559] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "refresh_cache-58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.582698] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "refresh_cache-58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.582850] env[62109]: DEBUG nova.network.neutron [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 839.605672] env[62109]: DEBUG nova.compute.manager [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Received event network-changed-4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 839.605719] env[62109]: DEBUG nova.compute.manager [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Refreshing instance network info cache due to event network-changed-4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 839.606237] env[62109]: DEBUG oslo_concurrency.lockutils [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] Acquiring lock "refresh_cache-3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.606237] env[62109]: DEBUG oslo_concurrency.lockutils [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] Acquired lock "refresh_cache-3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.606237] env[62109]: DEBUG nova.network.neutron [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Refreshing network info cache for port 4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 839.710217] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f40f910-445a-4087-83ae-c6f47e7db053 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 839.710534] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a010a6de-e1b2-436c-a308-fbe143908bec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.718104] env[62109]: DEBUG oslo_vmware.api [None req-8f40f910-445a-4087-83ae-c6f47e7db053 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 839.718104] env[62109]: value = "task-1116557" [ 839.718104] env[62109]: _type = "Task" [ 839.718104] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.729981] env[62109]: DEBUG oslo_vmware.api [None req-8f40f910-445a-4087-83ae-c6f47e7db053 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116557, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.763425] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5224df0b-9731-0c77-7a95-fa5f513e06ba, 'name': SearchDatastore_Task, 'duration_secs': 0.009464} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.763750] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.764015] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.764273] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.764432] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.764633] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.764898] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65a52d33-e7e2-4594-ad18-e561399ca7c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.775185] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 839.775185] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 839.775185] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22fe815b-7ed9-4b41-a7d6-5f727670d09b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.779766] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 839.779766] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e559e9-2fb5-2a0e-ee92-95fc0c9016eb" [ 839.779766] env[62109]: _type = "Task" [ 839.779766] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.787793] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e559e9-2fb5-2a0e-ee92-95fc0c9016eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.815441] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116556, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.828434] env[62109]: INFO nova.compute.manager [-] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Took 1.92 seconds to deallocate network for instance. 
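The CreateVM_Task, SearchDatastore_Task and PowerOnVM_Task entries above all follow the same submit-then-poll pattern: Nova invokes a vCenter task through the oslo.vmware session and then blocks in wait_for_task, which is what produces the repeated "progress is N%" lines and the later "completed successfully" entries. A minimal caller-side sketch of that pattern, assuming an already-created oslo_vmware.api.VMwareAPISession (the invoke_api call layout and the argument names used here are illustrative assumptions, not the literal nova.virt.vmwareapi code):

    # Sketch only: the submit-then-poll pattern behind the task entries above.
    # `session` is assumed to be an oslo_vmware.api.VMwareAPISession; its
    # wait_for_task / _poll_task methods are the ones referenced in the log.
    def create_vm_and_wait(session, folder_ref, config_spec, pool_ref, host_ref=None):
        # Submit the vCenter task; this returns a Task managed-object reference,
        # logged above as e.g. value = "task-1116554", _type = "Task".
        task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=pool_ref,
                                  host=host_ref)
        # Block until vCenter reports the task complete; each poll interval
        # produces one "... progress is N%" debug line, and task failures are
        # raised as oslo_vmware exceptions instead of being returned.
        task_info = session.wait_for_task(task)
        # For CreateVM_Task the TaskInfo result is the new VM's reference.
        return task_info.result

The same polling loop backs the folder creation, disk copy and power-on tasks interleaved through the surrounding entries; only the invoked vSphere method and its arguments change.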
[ 839.886784] env[62109]: DEBUG nova.network.neutron [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance_info_cache with network_info: [{"id": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "address": "fa:16:3e:f0:07:2b", "network": {"id": "4fcb7814-4de2-430d-af81-55131504c5bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2d2be1e2322b4c87945fff0cd79d3c7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde4056dc-a5", "ovs_interfaceid": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.936291] env[62109]: DEBUG nova.compute.manager [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 839.936291] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb09c0c2-33d6-4a14-9002-0c8103bac73a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.035770] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "3e641c90-2358-4a1c-9af5-6ad96f722aba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.036088] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "3e641c90-2358-4a1c-9af5-6ad96f722aba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.056729] env[62109]: DEBUG nova.compute.utils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 840.063194] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 
tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 840.063194] env[62109]: DEBUG nova.network.neutron [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 840.172023] env[62109]: DEBUG nova.network.neutron [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 840.187592] env[62109]: DEBUG nova.policy [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '741d963eb6fe473db210b0d6956e8193', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5435b5d2a57a47a9a087b0f466ed33b5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 840.233023] env[62109]: DEBUG oslo_vmware.api [None req-8f40f910-445a-4087-83ae-c6f47e7db053 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116557, 'name': PowerOffVM_Task, 'duration_secs': 0.356001} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.233023] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f40f910-445a-4087-83ae-c6f47e7db053 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 840.233023] env[62109]: DEBUG nova.compute.manager [None req-8f40f910-445a-4087-83ae-c6f47e7db053 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 840.233023] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4668ddb-4da3-467b-b672-45199cc38a27 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.292909] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e559e9-2fb5-2a0e-ee92-95fc0c9016eb, 'name': SearchDatastore_Task, 'duration_secs': 0.010164} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.293713] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4736835-3867-4414-96e9-8fc0e8e935b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.303919] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 840.303919] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a1e372-87c8-f531-8135-1ad2c0d07dc5" [ 840.303919] env[62109]: _type = "Task" [ 840.303919] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.316785] env[62109]: DEBUG oslo_vmware.api [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116556, 'name': PowerOnVM_Task, 'duration_secs': 0.593344} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.320625] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 840.320845] env[62109]: INFO nova.compute.manager [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Took 8.04 seconds to spawn the instance on the hypervisor. [ 840.321703] env[62109]: DEBUG nova.compute.manager [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 840.321786] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a1e372-87c8-f531-8135-1ad2c0d07dc5, 'name': SearchDatastore_Task, 'duration_secs': 0.009828} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.323553] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ffd528-50d2-431e-936f-4c0f47291438 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.326812] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.327083] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384/3da7aca9-5d65-4f5e-b0a3-7cf5308f0384.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 840.327366] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee2f7d15-af7a-42d6-97e7-43a1add146e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.339139] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.344027] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 840.344027] env[62109]: value = "task-1116558" [ 840.344027] env[62109]: _type = "Task" [ 840.344027] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.353950] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116558, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.392021] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Releasing lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.451499] env[62109]: INFO nova.compute.manager [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] instance snapshotting [ 840.451499] env[62109]: DEBUG nova.objects.instance [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'flavor' on Instance uuid 8b63f9a1-5639-48b2-b0a9-30380835bef2 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 840.566273] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 840.579394] env[62109]: DEBUG oslo_concurrency.lockutils [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "af3465db-fd56-458d-a499-14df3a0029f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.579790] env[62109]: DEBUG oslo_concurrency.lockutils [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "af3465db-fd56-458d-a499-14df3a0029f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.580035] env[62109]: DEBUG oslo_concurrency.lockutils [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "af3465db-fd56-458d-a499-14df3a0029f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.580262] env[62109]: DEBUG oslo_concurrency.lockutils [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "af3465db-fd56-458d-a499-14df3a0029f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.580459] env[62109]: DEBUG oslo_concurrency.lockutils [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "af3465db-fd56-458d-a499-14df3a0029f0-events" "released" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.583051] env[62109]: INFO nova.compute.manager [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Terminating instance [ 840.585921] env[62109]: DEBUG nova.compute.manager [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 840.586549] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 840.587200] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012d010b-b3c4-45e3-b7cc-f3b9ff9f60e7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.604273] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 840.604273] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97a3a8af-fef2-42df-952f-50d9ad66be05 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.612128] env[62109]: DEBUG oslo_vmware.api [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 840.612128] env[62109]: value = "task-1116559" [ 840.612128] env[62109]: _type = "Task" [ 840.612128] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.630327] env[62109]: DEBUG oslo_vmware.api [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116559, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.644540] env[62109]: DEBUG nova.network.neutron [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Updating instance_info_cache with network_info: [{"id": "7448d73b-2d36-46d9-9f1f-3ed3ede34226", "address": "fa:16:3e:7b:84:90", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7448d73b-2d", "ovs_interfaceid": "7448d73b-2d36-46d9-9f1f-3ed3ede34226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.741360] env[62109]: DEBUG nova.network.neutron [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Updated VIF entry in instance network info cache for port 4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 840.741764] env[62109]: DEBUG nova.network.neutron [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Updating instance_info_cache with network_info: [{"id": "4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6", "address": "fa:16:3e:f8:8b:04", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c8bdd11-0b", "ovs_interfaceid": "4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.745651] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8f40f910-445a-4087-83ae-c6f47e7db053 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5d656f91-d35f-45e1-8892-7cdacd306960" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.557s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.855478] env[62109]: INFO nova.compute.manager [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Took 43.97 seconds to build instance. [ 840.864539] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116558, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470231} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.864539] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384/3da7aca9-5d65-4f5e-b0a3-7cf5308f0384.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 840.864539] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 840.864793] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b826f92c-4d8e-4300-b7b2-9efc29e68a9a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.876048] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 840.876048] env[62109]: value = "task-1116560" [ 840.876048] env[62109]: _type = "Task" [ 840.876048] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.885786] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116560, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.923595] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712b1a74-5acd-45d6-8650-ddd7e745b01a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.961983] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72cf9bd-83b7-4d24-9924-4d90413c1baf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.966357] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5c5dc3-5271-4dd5-b21f-6ecf61564502 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.989470] env[62109]: DEBUG nova.network.neutron [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Successfully created port: 70961368-3e7f-4c05-b619-fdb2dc044a77 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 840.993778] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a36e51-b53b-433a-9edc-1ecfe17a8e4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.996578] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance '32cccd30-278c-48b6-8855-5cd76c2da057' progress to 83 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 841.104547] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "1399f618-3a93-4731-a59b-f98306d6cd52" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.104547] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.129132] env[62109]: DEBUG oslo_vmware.api [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116559, 'name': PowerOffVM_Task, 'duration_secs': 0.260118} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.129629] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 841.129889] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 841.130069] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-acf7ab6c-f65a-4f57-a103-70f28f1a3106 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.147309] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "refresh_cache-58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.147627] env[62109]: DEBUG nova.compute.manager [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Instance network_info: |[{"id": "7448d73b-2d36-46d9-9f1f-3ed3ede34226", "address": "fa:16:3e:7b:84:90", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7448d73b-2d", "ovs_interfaceid": "7448d73b-2d36-46d9-9f1f-3ed3ede34226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 841.148073] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:84:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0d7a2b2f-3b49-4dc8-9096-af16144b27a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'7448d73b-2d36-46d9-9f1f-3ed3ede34226', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 841.156265] env[62109]: DEBUG oslo.service.loopingcall [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 841.156829] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 841.159783] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-967f71da-2b33-444b-aef7-2303c9c45257 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.183330] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 841.183330] env[62109]: value = "task-1116562" [ 841.183330] env[62109]: _type = "Task" [ 841.183330] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.194925] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116562, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.195578] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51964a5-280c-4776-a9cf-cc103eb82c49 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.201281] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 841.201511] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 841.201689] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Deleting the datastore file [datastore2] af3465db-fd56-458d-a499-14df3a0029f0 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 841.201946] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4b4e2bad-01be-4797-87aa-0f42496210d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.206864] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4407d825-aa72-40d5-a032-7e9c4f310cd4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.211595] env[62109]: DEBUG oslo_vmware.api [None 
req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 841.211595] env[62109]: value = "task-1116563" [ 841.211595] env[62109]: _type = "Task" [ 841.211595] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.245663] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3518efcb-19a1-4a76-b604-e73b0aef46f4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.248495] env[62109]: DEBUG oslo_vmware.api [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116563, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.248962] env[62109]: DEBUG oslo_concurrency.lockutils [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] Releasing lock "refresh_cache-3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.249215] env[62109]: DEBUG nova.compute.manager [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] [instance: c694c178-3894-4997-8e99-8f4900a64848] Received event network-vif-deleted-3d487765-cb55-45bd-b4f2-b2cddcf12cfd {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.249434] env[62109]: DEBUG nova.compute.manager [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Received event network-vif-plugged-7448d73b-2d36-46d9-9f1f-3ed3ede34226 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.249600] env[62109]: DEBUG oslo_concurrency.lockutils [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] Acquiring lock "58365fb6-a38e-4afa-be36-3cdcdbdbc2b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.249799] env[62109]: DEBUG oslo_concurrency.lockutils [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] Lock "58365fb6-a38e-4afa-be36-3cdcdbdbc2b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.250821] env[62109]: DEBUG oslo_concurrency.lockutils [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] Lock "58365fb6-a38e-4afa-be36-3cdcdbdbc2b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.250821] env[62109]: DEBUG nova.compute.manager [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] No waiting events found dispatching 
network-vif-plugged-7448d73b-2d36-46d9-9f1f-3ed3ede34226 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 841.250821] env[62109]: WARNING nova.compute.manager [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Received unexpected event network-vif-plugged-7448d73b-2d36-46d9-9f1f-3ed3ede34226 for instance with vm_state building and task_state spawning. [ 841.250821] env[62109]: DEBUG nova.compute.manager [req-dcb53c24-f521-416a-a174-9aa8b8784f52 req-2a53fbb7-41c2-42f8-baf6-58c7e278a692 service nova] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Received event network-vif-deleted-8c141056-fbc3-4508-a389-9a9ed6566325 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.257325] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac10818d-3fa6-489b-8ec1-aacd2649ea75 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.272480] env[62109]: DEBUG nova.compute.provider_tree [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.365406] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42e3b851-3a96-49ac-946c-b5316ae6cef6 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "7ace6356-1a81-4095-8286-c9b6d829062b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.493s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.389966] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116560, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077764} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.390645] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 841.391649] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10aed91-1bf4-4e22-ad95-ea60153794d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.419951] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384/3da7aca9-5d65-4f5e-b0a3-7cf5308f0384.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 841.420602] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65584e99-c62e-4404-97ae-a4d9a3a16e10 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.444431] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 841.444431] env[62109]: value = "task-1116564" [ 841.444431] env[62109]: _type = "Task" [ 841.444431] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.453667] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116564, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.504944] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 841.504944] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e443e3f2-c3d9-47eb-aa67-5870a66670b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.514019] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 841.514019] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ade195a7-bce2-4c6c-9d77-6865f78623b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.515767] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 841.515767] env[62109]: value = "task-1116565" [ 841.515767] env[62109]: _type = "Task" [ 841.515767] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.529028] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116565, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.529028] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 841.529028] env[62109]: value = "task-1116566" [ 841.529028] env[62109]: _type = "Task" [ 841.529028] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.536812] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116566, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.577822] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 841.605987] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 841.606349] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 841.606536] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 841.606738] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 841.606965] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 841.607168] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 841.607427] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 841.607696] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 841.607905] env[62109]: DEBUG nova.virt.hardware [None 
req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 841.608109] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 841.608303] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 841.609272] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e05ab0-54fc-4977-84c5-c42939fc992c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.613522] env[62109]: INFO nova.compute.manager [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Detaching volume fbb52df2-cecd-40ba-89ef-7b4d6f79e515 [ 841.623535] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd15ba5-57ce-41e4-b8c6-5bf8ba6e1f18 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.676453] env[62109]: INFO nova.virt.block_device [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Attempting to driver detach volume fbb52df2-cecd-40ba-89ef-7b4d6f79e515 from mountpoint /dev/sdb [ 841.676662] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Volume detach. 
Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 841.676804] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244418', 'volume_id': 'fbb52df2-cecd-40ba-89ef-7b4d6f79e515', 'name': 'volume-fbb52df2-cecd-40ba-89ef-7b4d6f79e515', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1399f618-3a93-4731-a59b-f98306d6cd52', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbb52df2-cecd-40ba-89ef-7b4d6f79e515', 'serial': 'fbb52df2-cecd-40ba-89ef-7b4d6f79e515'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 841.677776] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed7615a-8341-4e34-bc3c-bdcf701df54e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.706514] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1bbe55-9298-4ddd-8296-23420854d2fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.712592] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116562, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.721278] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caecea27-7e0b-4882-969e-d5faedc3e972 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.728501] env[62109]: DEBUG oslo_vmware.api [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140016} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.729231] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 841.729942] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 841.729942] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 841.729942] env[62109]: INFO nova.compute.manager [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 841.730230] env[62109]: DEBUG oslo.service.loopingcall [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 841.730451] env[62109]: DEBUG nova.compute.manager [-] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 841.730551] env[62109]: DEBUG nova.network.neutron [-] [instance: af3465db-fd56-458d-a499-14df3a0029f0] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 841.751248] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73abe7fb-a910-4f68-aba2-b8e4eaaee90b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.774204] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] The volume has not been displaced from its original location: [datastore2] volume-fbb52df2-cecd-40ba-89ef-7b4d6f79e515/volume-fbb52df2-cecd-40ba-89ef-7b4d6f79e515.vmdk. No consolidation needed. 
{{(pid=62109) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 841.778493] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Reconfiguring VM instance instance-00000026 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 841.779492] env[62109]: DEBUG nova.scheduler.client.report [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 841.782859] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-559c1a46-9764-4270-b41e-f311f678dd6d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.808638] env[62109]: DEBUG oslo_vmware.api [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 841.808638] env[62109]: value = "task-1116567" [ 841.808638] env[62109]: _type = "Task" [ 841.808638] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.820267] env[62109]: DEBUG oslo_vmware.api [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116567, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.867604] env[62109]: DEBUG nova.compute.manager [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 841.877743] env[62109]: DEBUG nova.compute.manager [req-a1327b53-9ed8-471b-852b-b1754f739652 req-f989d5ea-a370-4764-a3b7-d7bc202c2089 service nova] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Received event network-changed-7448d73b-2d36-46d9-9f1f-3ed3ede34226 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.877966] env[62109]: DEBUG nova.compute.manager [req-a1327b53-9ed8-471b-852b-b1754f739652 req-f989d5ea-a370-4764-a3b7-d7bc202c2089 service nova] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Refreshing instance network info cache due to event network-changed-7448d73b-2d36-46d9-9f1f-3ed3ede34226. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 841.878451] env[62109]: DEBUG oslo_concurrency.lockutils [req-a1327b53-9ed8-471b-852b-b1754f739652 req-f989d5ea-a370-4764-a3b7-d7bc202c2089 service nova] Acquiring lock "refresh_cache-58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.878558] env[62109]: DEBUG oslo_concurrency.lockutils [req-a1327b53-9ed8-471b-852b-b1754f739652 req-f989d5ea-a370-4764-a3b7-d7bc202c2089 service nova] Acquired lock "refresh_cache-58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.878749] env[62109]: DEBUG nova.network.neutron [req-a1327b53-9ed8-471b-852b-b1754f739652 req-f989d5ea-a370-4764-a3b7-d7bc202c2089 service nova] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Refreshing network info cache for port 7448d73b-2d36-46d9-9f1f-3ed3ede34226 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 841.956438] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116564, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.965797] env[62109]: DEBUG nova.compute.manager [req-ce0fa0d3-7bfa-4502-b1ed-40dc163c070d req-889020ef-3c29-4426-bef2-7f1f3dc7c38b service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Received event network-changed-a06370a9-effe-4205-85fa-bfa658250da0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.966066] env[62109]: DEBUG nova.compute.manager [req-ce0fa0d3-7bfa-4502-b1ed-40dc163c070d req-889020ef-3c29-4426-bef2-7f1f3dc7c38b service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Refreshing instance network info cache due to event network-changed-a06370a9-effe-4205-85fa-bfa658250da0. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 841.966325] env[62109]: DEBUG oslo_concurrency.lockutils [req-ce0fa0d3-7bfa-4502-b1ed-40dc163c070d req-889020ef-3c29-4426-bef2-7f1f3dc7c38b service nova] Acquiring lock "refresh_cache-7ace6356-1a81-4095-8286-c9b6d829062b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.966681] env[62109]: DEBUG oslo_concurrency.lockutils [req-ce0fa0d3-7bfa-4502-b1ed-40dc163c070d req-889020ef-3c29-4426-bef2-7f1f3dc7c38b service nova] Acquired lock "refresh_cache-7ace6356-1a81-4095-8286-c9b6d829062b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.966681] env[62109]: DEBUG nova.network.neutron [req-ce0fa0d3-7bfa-4502-b1ed-40dc163c070d req-889020ef-3c29-4426-bef2-7f1f3dc7c38b service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Refreshing network info cache for port a06370a9-effe-4205-85fa-bfa658250da0 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 842.029395] env[62109]: DEBUG oslo_vmware.api [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116565, 'name': PowerOnVM_Task, 'duration_secs': 0.413315} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.033725] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 842.033725] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bb952d-cbc7-4b15-bfea-f6683bdbe408 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance '32cccd30-278c-48b6-8855-5cd76c2da057' progress to 100 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 842.045947] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116566, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.196203] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116562, 'name': CreateVM_Task, 'duration_secs': 0.573698} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.196408] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 842.197262] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.197422] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.197798] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 842.198156] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1417d8f-acfc-4f0d-8668-d205f4a42260 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.204634] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 842.204634] env[62109]: value = 
"session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f7a3dc-c798-886c-d1e6-7d13c1b3024b" [ 842.204634] env[62109]: _type = "Task" [ 842.204634] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.219031] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f7a3dc-c798-886c-d1e6-7d13c1b3024b, 'name': SearchDatastore_Task, 'duration_secs': 0.010947} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.219408] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.219655] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.219890] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.220056] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.220298] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.220617] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1eb108c6-bfa7-4837-a3db-07f8a5e15bbc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.231806] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.232073] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None 
req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 842.233224] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea0f591f-4d50-4783-afea-7c103b4c7175 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.241582] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 842.241582] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520f09a6-ee99-6bf9-8bbc-bc734ac624de" [ 842.241582] env[62109]: _type = "Task" [ 842.241582] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.252610] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520f09a6-ee99-6bf9-8bbc-bc734ac624de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.301558] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.752s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.301558] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 842.304454] env[62109]: DEBUG oslo_concurrency.lockutils [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.243s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.305783] env[62109]: DEBUG oslo_concurrency.lockutils [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.307200] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.307459] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.307663] env[62109]: INFO nova.compute.manager [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Successfully reverted task state from image_uploading on failure for instance. 
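The ERROR traceback that follows records a snapshot cleanup failing inside the task poller: vCenter reports that 'vim.VirtualMachine:vm-244416' no longer exists, and oslo.vmware surfaces the fault as ManagedObjectNotFoundException while nova.virt.vmwareapi.vmops._delete_vm_snapshot is waiting on the delete task. As a minimal sketch only (not the Nova code path itself), the snippet below waits on a task through an oslo.vmware session and treats a vanished managed object as "already gone"; the session object and the delete_snapshot_task handle are illustrative placeholders, not values taken from this log.

    # Minimal sketch, assuming an already-created oslo_vmware.api.VMwareAPISession;
    # the task handle is a placeholder, not taken from this log.
    from oslo_vmware import exceptions as vexc

    def wait_for_snapshot_cleanup(session, delete_snapshot_task):
        """Wait for a snapshot-delete task, tolerating a vanished VM.

        Returns True when the task completes, False when vCenter reports the
        backing VM has already been deleted (the fault in the traceback below).
        """
        try:
            # Same polling entry point the traceback passes through:
            # oslo_vmware.api.VMwareAPISession.wait_for_task().
            session.wait_for_task(delete_snapshot_task)
            return True
        except vexc.ManagedObjectNotFoundException:
            # e.g. "The object 'vim.VirtualMachine:vm-244416' has already been
            # deleted or has not been completely created" -- nothing left to clean up.
            return False
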
[ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server [None req-0a1a4ce9-6090-48a5-ad55-7314322172c9 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Exception during message handling: oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-244416' has already been deleted or has not been completely created [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server raise self.value [ 842.311931] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server raise self.value [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, 
in decorated_function [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server raise self.value [ 842.312367] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 233, in decorated_function [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server raise self.value [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 230, in decorated_function [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server return function(self, context, image_id, instance, [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4442, in snapshot_instance [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server self._snapshot_instance(context, image_id, instance, [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4475, in _snapshot_instance [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server self.driver.snapshot(context, instance, image_id, [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 565, in snapshot [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server self._vmops.snapshot(context, instance, image_id, update_task_state) [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1034, in snapshot [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server self._delete_vm_snapshot(instance, vm_ref, snapshot_ref) [ 842.312793] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/decorator.py", line 232, in fun [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server return caller(func, *(extras + args), **kw) [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 124, in retry_if_task_in_progress [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server f(*args, **kwargs) [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 937, in _delete_vm_snapshot [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server self._session._wait_for_task(delete_snapshot_task) [ 
842.313251] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server return self.wait_for_task(task_ref) [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server return evt.wait() [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server self.f(*self.args, **self.kw) [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 842.313251] env[62109]: ERROR oslo_messaging.rpc.server raise exceptions.translate_fault(task_info.error) [ 842.313664] env[62109]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-244416' has already been deleted or has not been completely created [ 842.313664] env[62109]: ERROR oslo_messaging.rpc.server [ 842.313664] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.743s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.313664] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.315863] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.013s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.317661] env[62109]: INFO nova.compute.claims [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 842.335380] env[62109]: DEBUG oslo_vmware.api [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116567, 'name': 
ReconfigVM_Task, 'duration_secs': 0.447612} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.335380] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Reconfigured VM instance instance-00000026 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 842.341631] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3083e8b0-6348-4e31-9aab-6ece54266c40 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.354111] env[62109]: INFO nova.scheduler.client.report [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleted allocations for instance c5c63ece-611d-45d1-a8e6-9327700f1563 [ 842.363120] env[62109]: INFO nova.scheduler.client.report [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Deleted allocations for instance a9fb75d5-e303-4f31-888d-528963ab23b7 [ 842.376201] env[62109]: DEBUG oslo_vmware.api [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 842.376201] env[62109]: value = "task-1116568" [ 842.376201] env[62109]: _type = "Task" [ 842.376201] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.393735] env[62109]: DEBUG oslo_vmware.api [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116568, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.397567] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.459127] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116564, 'name': ReconfigVM_Task, 'duration_secs': 0.760277} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.459229] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384/3da7aca9-5d65-4f5e-b0a3-7cf5308f0384.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 842.460483] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-514eed94-fd55-4efb-a609-7dac4028599a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.469598] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 842.469598] env[62109]: value = "task-1116569" [ 842.469598] env[62109]: _type = "Task" [ 842.469598] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.482186] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116569, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.543768] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116566, 'name': CreateSnapshot_Task, 'duration_secs': 0.779246} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.544406] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 842.545034] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b77199-0b5d-4391-b5cb-f5032f7dfc35 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.548252] env[62109]: DEBUG nova.network.neutron [-] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.638977] env[62109]: INFO nova.compute.manager [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Rebuilding instance [ 842.695104] env[62109]: DEBUG nova.compute.manager [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 842.696325] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0350eba1-83a0-493b-9576-35b10b608865 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.755968] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520f09a6-ee99-6bf9-8bbc-bc734ac624de, 'name': SearchDatastore_Task, 'duration_secs': 0.012} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.759222] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b8d3195-d93f-41c4-b52c-0c7cbcb8cce0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.771185] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 842.771185] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c3e0e2-68c3-93a3-d4e8-c49c9e8bb695" [ 842.771185] env[62109]: _type = "Task" [ 842.771185] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.778357] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c3e0e2-68c3-93a3-d4e8-c49c9e8bb695, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.805925] env[62109]: DEBUG nova.compute.utils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 842.807930] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 842.808172] env[62109]: DEBUG nova.network.neutron [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 842.872981] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c8d7da10-aa9f-4b8e-9bf8-30f4ee5306bb tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "c5c63ece-611d-45d1-a8e6-9327700f1563" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.680s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.874783] env[62109]: DEBUG oslo_concurrency.lockutils [None req-334261ea-94dd-4f75-a78f-2384c67a9c3c tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "a9fb75d5-e303-4f31-888d-528963ab23b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.298s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.889733] env[62109]: DEBUG oslo_vmware.api [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116568, 'name': ReconfigVM_Task, 'duration_secs': 0.174946} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.890116] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244418', 'volume_id': 'fbb52df2-cecd-40ba-89ef-7b4d6f79e515', 'name': 'volume-fbb52df2-cecd-40ba-89ef-7b4d6f79e515', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1399f618-3a93-4731-a59b-f98306d6cd52', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbb52df2-cecd-40ba-89ef-7b4d6f79e515', 'serial': 'fbb52df2-cecd-40ba-89ef-7b4d6f79e515'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 842.896801] env[62109]: DEBUG nova.policy [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '741d963eb6fe473db210b0d6956e8193', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5435b5d2a57a47a9a087b0f466ed33b5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 842.982928] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116569, 'name': Rename_Task, 'duration_secs': 0.16822} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.983335] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 842.983500] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-acb09d87-c281-441a-a90b-b8c33d1d8a1c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.991926] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 842.991926] env[62109]: value = "task-1116570" [ 842.991926] env[62109]: _type = "Task" [ 842.991926] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.998020] env[62109]: DEBUG nova.network.neutron [req-a1327b53-9ed8-471b-852b-b1754f739652 req-f989d5ea-a370-4764-a3b7-d7bc202c2089 service nova] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Updated VIF entry in instance network info cache for port 7448d73b-2d36-46d9-9f1f-3ed3ede34226. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 842.998404] env[62109]: DEBUG nova.network.neutron [req-a1327b53-9ed8-471b-852b-b1754f739652 req-f989d5ea-a370-4764-a3b7-d7bc202c2089 service nova] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Updating instance_info_cache with network_info: [{"id": "7448d73b-2d36-46d9-9f1f-3ed3ede34226", "address": "fa:16:3e:7b:84:90", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7448d73b-2d", "ovs_interfaceid": "7448d73b-2d36-46d9-9f1f-3ed3ede34226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.003304] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116570, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.054459] env[62109]: INFO nova.compute.manager [-] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Took 1.32 seconds to deallocate network for instance. [ 843.074111] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 843.074570] env[62109]: DEBUG nova.network.neutron [req-ce0fa0d3-7bfa-4502-b1ed-40dc163c070d req-889020ef-3c29-4426-bef2-7f1f3dc7c38b service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Updated VIF entry in instance network info cache for port a06370a9-effe-4205-85fa-bfa658250da0. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 843.074892] env[62109]: DEBUG nova.network.neutron [req-ce0fa0d3-7bfa-4502-b1ed-40dc163c070d req-889020ef-3c29-4426-bef2-7f1f3dc7c38b service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Updating instance_info_cache with network_info: [{"id": "a06370a9-effe-4205-85fa-bfa658250da0", "address": "fa:16:3e:3d:f3:6c", "network": {"id": "f5e70352-43f1-423d-8e31-44ae247ddba2", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-426993836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cddb4c7a9ba442d98d6cf4f3ab30ad71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa06370a9-ef", "ovs_interfaceid": "a06370a9-effe-4205-85fa-bfa658250da0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.076578] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0ad3701f-3c38-4988-9a74-cfe84f9aba9c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.087297] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 843.087297] env[62109]: value = "task-1116571" [ 843.087297] env[62109]: _type = "Task" [ 843.087297] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.096643] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116571, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.143709] env[62109]: DEBUG nova.network.neutron [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Successfully updated port: 70961368-3e7f-4c05-b619-fdb2dc044a77 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 843.210143] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 843.211063] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1fbc85bd-6b16-4351-ad16-5c1de9f99b10 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.220233] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 843.220233] env[62109]: value = "task-1116572" [ 843.220233] env[62109]: _type = "Task" [ 843.220233] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.230481] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116572, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.282974] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c3e0e2-68c3-93a3-d4e8-c49c9e8bb695, 'name': SearchDatastore_Task, 'duration_secs': 0.011087} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.285086] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.285312] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4/58365fb6-a38e-4afa-be36-3cdcdbdbc2b4.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 843.287955] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c8c0d3f-ea15-47cd-a354-b6687b83a043 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.297236] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 843.297236] env[62109]: value = "task-1116573" [ 843.297236] env[62109]: _type = "Task" [ 843.297236] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.311755] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 843.318025] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116573, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.442492] env[62109]: DEBUG nova.objects.instance [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'flavor' on Instance uuid 1399f618-3a93-4731-a59b-f98306d6cd52 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 843.504239] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116570, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.508650] env[62109]: DEBUG oslo_concurrency.lockutils [req-a1327b53-9ed8-471b-852b-b1754f739652 req-f989d5ea-a370-4764-a3b7-d7bc202c2089 service nova] Releasing lock "refresh_cache-58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.568717] env[62109]: DEBUG oslo_concurrency.lockutils [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.577127] env[62109]: DEBUG oslo_concurrency.lockutils [req-ce0fa0d3-7bfa-4502-b1ed-40dc163c070d req-889020ef-3c29-4426-bef2-7f1f3dc7c38b service nova] Releasing lock "refresh_cache-7ace6356-1a81-4095-8286-c9b6d829062b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.605365] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116571, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.644585] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "refresh_cache-b1321874-8f97-4444-9f9c-d586d51a9e92" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.644778] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired lock "refresh_cache-b1321874-8f97-4444-9f9c-d586d51a9e92" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.644998] env[62109]: DEBUG nova.network.neutron [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 843.727382] env[62109]: DEBUG nova.network.neutron [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Successfully created port: 1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 843.740096] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 843.740430] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 
tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 843.741404] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447d28f0-e76f-4545-b13b-00030617fdb5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.751973] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 843.757675] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c987b044-0e93-45ff-9253-507f7f72455b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.792425] env[62109]: DEBUG oslo_concurrency.lockutils [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "55381bef-dab5-44cd-97fe-9fc75ab61d0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.792769] env[62109]: DEBUG oslo_concurrency.lockutils [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "55381bef-dab5-44cd-97fe-9fc75ab61d0e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.793130] env[62109]: DEBUG oslo_concurrency.lockutils [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "55381bef-dab5-44cd-97fe-9fc75ab61d0e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.793408] env[62109]: DEBUG oslo_concurrency.lockutils [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "55381bef-dab5-44cd-97fe-9fc75ab61d0e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.793627] env[62109]: DEBUG oslo_concurrency.lockutils [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "55381bef-dab5-44cd-97fe-9fc75ab61d0e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.796767] env[62109]: INFO nova.compute.manager [None 
req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Terminating instance [ 843.799168] env[62109]: DEBUG nova.compute.manager [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 843.799382] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 843.800336] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838a2ccd-f4d9-438d-b7e5-cbe8f8ac7161 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.814636] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116573, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489417} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.816733] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4/58365fb6-a38e-4afa-be36-3cdcdbdbc2b4.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 843.816982] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.820417] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 843.823290] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a35f1805-a01b-4282-8ef0-78e060fd3e36 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.825372] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5f4c9ab-a66b-4486-a324-08dbc0e7903c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.833215] env[62109]: DEBUG oslo_vmware.api [None 
req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 843.833215] env[62109]: value = "task-1116576" [ 843.833215] env[62109]: _type = "Task" [ 843.833215] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.834782] env[62109]: DEBUG oslo_vmware.api [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 843.834782] env[62109]: value = "task-1116575" [ 843.834782] env[62109]: _type = "Task" [ 843.834782] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.848393] env[62109]: DEBUG oslo_vmware.api [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116575, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.855944] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 843.856185] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 843.856374] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleting the datastore file [datastore2] 5d656f91-d35f-45e1-8892-7cdacd306960 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.856659] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116576, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.857327] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f2ae6c7-4c35-485b-8066-1bfdae16448d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.865464] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 843.865464] env[62109]: value = "task-1116577" [ 843.865464] env[62109]: _type = "Task" [ 843.865464] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.872664] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed646e2-8223-4f69-8b02-80d896a0863a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.880625] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116577, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.886585] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f169776a-cd51-4676-9322-99bf866671cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.941842] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bbd98f-eb62-40ef-aa9d-3ae2e5e8c145 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.955464] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005023b2-5396-4b65-9716-c749f5e52eaa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.973533] env[62109]: DEBUG nova.compute.provider_tree [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.004187] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116570, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.099916] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116571, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.182489] env[62109]: DEBUG nova.network.neutron [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 844.325779] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 844.348345] env[62109]: DEBUG nova.network.neutron [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Updating instance_info_cache with network_info: [{"id": "70961368-3e7f-4c05-b619-fdb2dc044a77", "address": "fa:16:3e:ed:cd:c5", "network": {"id": "99854997-f910-4858-a446-ecc6781e679e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1633145076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5435b5d2a57a47a9a087b0f466ed33b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70961368-3e", "ovs_interfaceid": "70961368-3e7f-4c05-b619-fdb2dc044a77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.358041] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116576, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118498} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.359024] env[62109]: DEBUG oslo_vmware.api [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116575, 'name': PowerOffVM_Task, 'duration_secs': 0.240192} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.359024] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.359298] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 844.359680] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 844.360295] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3173341e-92b7-4092-b667-819b9b610433 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.363920] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca06a37f-7678-426c-a94c-92f6c321680b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.369100] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 844.369372] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 844.369582] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.369780] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 
tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 844.370048] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.370125] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 844.370385] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 844.370585] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 844.370821] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 844.370981] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 844.371213] env[62109]: DEBUG nova.virt.hardware [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 844.376353] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc89694-84a3-4343-ba2e-057b9a6aaa71 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.406599] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4/58365fb6-a38e-4afa-be36-3cdcdbdbc2b4.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.408842] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task 
with opID=oslo.vmware-bc962846-c9eb-4e7a-ad73-0a5c8b3809f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.435112] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116577, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1875} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.436015] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 844.436269] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 844.436941] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 844.441132] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe470c6-0cc4-4a75-8c29-f4ae87a94e1b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.447558] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 844.447558] env[62109]: value = "task-1116579" [ 844.447558] env[62109]: _type = "Task" [ 844.447558] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.453320] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 844.453591] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 844.453791] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Deleting the datastore file [datastore1] 55381bef-dab5-44cd-97fe-9fc75ab61d0e {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 844.464376] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96d9b49f-bf57-453f-ad49-955e860bf98a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.466674] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0ac0bdb9-f653-4ae4-b23a-f5ca4e77c32f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.362s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.477123] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116579, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.478976] env[62109]: DEBUG nova.scheduler.client.report [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 844.483914] env[62109]: DEBUG oslo_vmware.api [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for the task: (returnval){ [ 844.483914] env[62109]: value = "task-1116580" [ 844.483914] env[62109]: _type = "Task" [ 844.483914] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.494639] env[62109]: DEBUG oslo_vmware.api [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116580, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.507841] env[62109]: DEBUG oslo_vmware.api [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116570, 'name': PowerOnVM_Task, 'duration_secs': 1.23801} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.508135] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 844.508391] env[62109]: INFO nova.compute.manager [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Took 9.49 seconds to spawn the instance on the hypervisor. [ 844.508617] env[62109]: DEBUG nova.compute.manager [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 844.509670] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e28fb9-1203-4bc4-87b9-d0e8ecbcca2c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.600371] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116571, 'name': CloneVM_Task, 'duration_secs': 1.453634} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.600966] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Created linked-clone VM from snapshot [ 844.601532] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11df37f3-6d83-414b-8179-f39a45731f4c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.609840] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Uploading image 86f9f547-4757-466b-b9a1-a5222ef70c82 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 844.639983] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 844.639983] env[62109]: value = "vm-244456" [ 844.639983] env[62109]: _type = "VirtualMachine" [ 844.639983] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 844.640306] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ac385148-e91f-45ff-95d6-683820e3fd5e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.647820] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lease: (returnval){ [ 844.647820] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b5c42c-3cf6-6777-58c5-84daff0283bb" [ 844.647820] env[62109]: _type = "HttpNfcLease" [ 844.647820] env[62109]: } obtained for exporting VM: (result){ [ 844.647820] env[62109]: value = "vm-244456" [ 844.647820] env[62109]: _type = "VirtualMachine" [ 844.647820] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 844.648098] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the lease: (returnval){ [ 844.648098] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b5c42c-3cf6-6777-58c5-84daff0283bb" [ 844.648098] env[62109]: _type = "HttpNfcLease" [ 844.648098] env[62109]: } to be ready. 
{{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 844.655587] env[62109]: DEBUG nova.compute.manager [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Received event network-vif-deleted-a33eb191-a49c-4a63-8f1a-569b4fcbc346 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 844.655766] env[62109]: DEBUG nova.compute.manager [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Received event network-vif-plugged-70961368-3e7f-4c05-b619-fdb2dc044a77 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 844.655961] env[62109]: DEBUG oslo_concurrency.lockutils [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] Acquiring lock "b1321874-8f97-4444-9f9c-d586d51a9e92-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.656402] env[62109]: DEBUG oslo_concurrency.lockutils [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] Lock "b1321874-8f97-4444-9f9c-d586d51a9e92-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.656402] env[62109]: DEBUG oslo_concurrency.lockutils [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] Lock "b1321874-8f97-4444-9f9c-d586d51a9e92-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.656509] env[62109]: DEBUG nova.compute.manager [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] No waiting events found dispatching network-vif-plugged-70961368-3e7f-4c05-b619-fdb2dc044a77 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 844.656681] env[62109]: WARNING nova.compute.manager [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Received unexpected event network-vif-plugged-70961368-3e7f-4c05-b619-fdb2dc044a77 for instance with vm_state building and task_state spawning. [ 844.656846] env[62109]: DEBUG nova.compute.manager [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Received event network-changed-70961368-3e7f-4c05-b619-fdb2dc044a77 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 844.657011] env[62109]: DEBUG nova.compute.manager [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Refreshing instance network info cache due to event network-changed-70961368-3e7f-4c05-b619-fdb2dc044a77. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 844.657229] env[62109]: DEBUG oslo_concurrency.lockutils [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] Acquiring lock "refresh_cache-b1321874-8f97-4444-9f9c-d586d51a9e92" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.659373] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 844.659373] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b5c42c-3cf6-6777-58c5-84daff0283bb" [ 844.659373] env[62109]: _type = "HttpNfcLease" [ 844.659373] env[62109]: } is initializing. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 844.673956] env[62109]: DEBUG nova.compute.manager [req-9563cbc5-a217-403c-9d98-409d56ba6c48 req-9de4da51-3ab6-47f5-b446-54354c3bc8c5 service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Received event network-changed-a06370a9-effe-4205-85fa-bfa658250da0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 844.674235] env[62109]: DEBUG nova.compute.manager [req-9563cbc5-a217-403c-9d98-409d56ba6c48 req-9de4da51-3ab6-47f5-b446-54354c3bc8c5 service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Refreshing instance network info cache due to event network-changed-a06370a9-effe-4205-85fa-bfa658250da0. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 844.674235] env[62109]: DEBUG oslo_concurrency.lockutils [req-9563cbc5-a217-403c-9d98-409d56ba6c48 req-9de4da51-3ab6-47f5-b446-54354c3bc8c5 service nova] Acquiring lock "refresh_cache-7ace6356-1a81-4095-8286-c9b6d829062b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.674555] env[62109]: DEBUG oslo_concurrency.lockutils [req-9563cbc5-a217-403c-9d98-409d56ba6c48 req-9de4da51-3ab6-47f5-b446-54354c3bc8c5 service nova] Acquired lock "refresh_cache-7ace6356-1a81-4095-8286-c9b6d829062b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.674598] env[62109]: DEBUG nova.network.neutron [req-9563cbc5-a217-403c-9d98-409d56ba6c48 req-9de4da51-3ab6-47f5-b446-54354c3bc8c5 service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Refreshing network info cache for port a06370a9-effe-4205-85fa-bfa658250da0 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 844.851137] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Releasing lock "refresh_cache-b1321874-8f97-4444-9f9c-d586d51a9e92" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.851474] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Instance network_info: |[{"id": "70961368-3e7f-4c05-b619-fdb2dc044a77", "address": "fa:16:3e:ed:cd:c5", "network": {"id": "99854997-f910-4858-a446-ecc6781e679e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1633145076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 
4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5435b5d2a57a47a9a087b0f466ed33b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70961368-3e", "ovs_interfaceid": "70961368-3e7f-4c05-b619-fdb2dc044a77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 844.851826] env[62109]: DEBUG oslo_concurrency.lockutils [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] Acquired lock "refresh_cache-b1321874-8f97-4444-9f9c-d586d51a9e92" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.852036] env[62109]: DEBUG nova.network.neutron [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Refreshing network info cache for port 70961368-3e7f-4c05-b619-fdb2dc044a77 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 844.855161] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:cd:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f85835c8-5d0c-4b2f-97c4-6c4006580f79', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70961368-3e7f-4c05-b619-fdb2dc044a77', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.864154] env[62109]: DEBUG oslo.service.loopingcall [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.864457] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 844.864713] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e408b64-21a6-4c6a-ab4b-16f229d96757 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.890154] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.890154] env[62109]: value = "task-1116582" [ 844.890154] env[62109]: _type = "Task" [ 844.890154] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.899706] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aeb33e11-8935-4c35-a7d0-205b4093a741 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "1399f618-3a93-4731-a59b-f98306d6cd52" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.899984] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aeb33e11-8935-4c35-a7d0-205b4093a741 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.900194] env[62109]: DEBUG nova.compute.manager [None req-aeb33e11-8935-4c35-a7d0-205b4093a741 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 844.900440] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116582, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.901569] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0b8333-aa91-438d-811c-78ea33a056ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.908676] env[62109]: DEBUG nova.compute.manager [None req-aeb33e11-8935-4c35-a7d0-205b4093a741 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62109) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 844.909512] env[62109]: DEBUG nova.objects.instance [None req-aeb33e11-8935-4c35-a7d0-205b4093a741 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'flavor' on Instance uuid 1399f618-3a93-4731-a59b-f98306d6cd52 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.962737] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116579, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.987025] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.669s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.987025] env[62109]: DEBUG nova.compute.manager [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 844.989246] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.673s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.990715] env[62109]: INFO nova.compute.claims [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 845.005825] env[62109]: DEBUG oslo_vmware.api [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Task: {'id': task-1116580, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167069} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.005825] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 845.005825] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 845.005825] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 845.005825] env[62109]: INFO nova.compute.manager [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Took 1.21 seconds to destroy the instance on the hypervisor. 
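Annotation: the teardown of instance 55381bef-... above removes the instance's datastore files through the FileManager and then waits on the returned task, which is what the DeleteDatastoreFile_Task lines record. A minimal sketch of that call; the datastore path is illustrative and `dc_ref` is a placeholder Datacenter moref:

    # session: oslo_vmware.api.VMwareAPISession; dc_ref: Datacenter moref.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] 55381bef-dab5-44cd-97fe-9fc75ab61d0e',  # illustrative
        datacenter=dc_ref)
    session.wait_for_task(task)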
[ 845.006207] env[62109]: DEBUG oslo.service.loopingcall [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 845.006207] env[62109]: DEBUG nova.compute.manager [-] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 845.006207] env[62109]: DEBUG nova.network.neutron [-] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 845.032146] env[62109]: INFO nova.compute.manager [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Took 46.26 seconds to build instance. [ 845.158318] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 845.158318] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b5c42c-3cf6-6777-58c5-84daff0283bb" [ 845.158318] env[62109]: _type = "HttpNfcLease" [ 845.158318] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 845.158795] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 845.158795] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b5c42c-3cf6-6777-58c5-84daff0283bb" [ 845.158795] env[62109]: _type = "HttpNfcLease" [ 845.158795] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 845.159796] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f3615d-e9b5-4d83-921e-3a07f59d5241 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.168552] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52244679-0605-73ee-cbee-2bb5846c3faf/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 845.168679] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52244679-0605-73ee-cbee-2bb5846c3faf/disk-0.vmdk for reading. 
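Annotation: once the export lease flips from "initializing" to "ready", the read handle fetches the lease's info property and pulls the disk URL out of its deviceUrl list, which is where the "Found VMDK URL ... from lease info" line comes from. A sketch under the assumption that `session` and `lease` are the objects from the export sketch; the '.vmdk' filter is an illustrative simplification:

    from oslo_vmware import vim_util

    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    # Each deviceUrl entry carries a url; the exported disk's URL ends in .vmdk.
    vmdk_url = next(du.url for du in lease_info.deviceUrl
                    if du.url.endswith('.vmdk'))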
{{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 845.302338] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-21b4fec6-2d3d-4575-bcc4-c55d5322c18b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.405715] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116582, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.417604] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d18ea02c-27f0-4b3b-a1c3-2d50f383cfd1 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "interface-7afbb35b-9865-40a7-8b37-d6a661a186a9-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.417862] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d18ea02c-27f0-4b3b-a1c3-2d50f383cfd1 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-7afbb35b-9865-40a7-8b37-d6a661a186a9-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.418447] env[62109]: DEBUG nova.objects.instance [None req-d18ea02c-27f0-4b3b-a1c3-2d50f383cfd1 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lazy-loading 'flavor' on Instance uuid 7afbb35b-9865-40a7-8b37-d6a661a186a9 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.421737] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeb33e11-8935-4c35-a7d0-205b4093a741 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 845.422194] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db9d286f-d9f4-42b9-be67-61aab6eb97a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.433877] env[62109]: DEBUG oslo_vmware.api [None req-aeb33e11-8935-4c35-a7d0-205b4093a741 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 845.433877] env[62109]: value = "task-1116583" [ 845.433877] env[62109]: _type = "Task" [ 845.433877] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.444419] env[62109]: DEBUG oslo_vmware.api [None req-aeb33e11-8935-4c35-a7d0-205b4093a741 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116583, 'name': PowerOffVM_Task} progress is 0%. 
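Annotation: while the exported disk is being streamed out for the image upload, the lease has to be kept alive by periodically reporting progress; the HttpNfcLease.HttpNfcLeaseProgress invocation above is that keepalive. Roughly, and assuming the same `session` and `lease` objects:

    # Report ~10% transferred; repeated periodically during the transfer,
    # after which the lease is completed or aborted by the handle.
    session.invoke_api(session.vim, 'HttpNfcLeaseProgress', lease, percent=10)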
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.471970] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116579, 'name': ReconfigVM_Task, 'duration_secs': 0.981314} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.474184] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4/58365fb6-a38e-4afa-be36-3cdcdbdbc2b4.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 845.474184] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6d65ce3-ee1a-4090-afbb-e9405a4070c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.485552] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 845.485552] env[62109]: value = "task-1116584" [ 845.485552] env[62109]: _type = "Task" [ 845.485552] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.496156] env[62109]: DEBUG nova.virt.hardware [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 845.496593] env[62109]: DEBUG nova.virt.hardware [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 845.496782] env[62109]: DEBUG nova.virt.hardware [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.496994] env[62109]: DEBUG nova.virt.hardware [None 
req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 845.497225] env[62109]: DEBUG nova.virt.hardware [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.497397] env[62109]: DEBUG nova.virt.hardware [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 845.497622] env[62109]: DEBUG nova.virt.hardware [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 845.497790] env[62109]: DEBUG nova.virt.hardware [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 845.497959] env[62109]: DEBUG nova.virt.hardware [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 845.498160] env[62109]: DEBUG nova.virt.hardware [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 845.498342] env[62109]: DEBUG nova.virt.hardware [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 845.505178] env[62109]: DEBUG nova.compute.utils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 845.505178] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b6be44-7867-46b3-8a85-bb10257dcc44 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.514686] env[62109]: DEBUG nova.compute.manager [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Allocating IP information 
in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 845.514686] env[62109]: DEBUG nova.network.neutron [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 845.515086] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116584, 'name': Rename_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.527116] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b1c54a-e5db-4f5a-ad9b-1e8e3d682932 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.543052] env[62109]: DEBUG oslo_concurrency.lockutils [None req-43fb4b54-361e-4f6f-ad2b-6d27eea02554 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.782s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.544119] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:f4:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56c50c1b-b3f7-4097-b080-6b487489343b', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 845.552718] env[62109]: DEBUG oslo.service.loopingcall [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 845.555848] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 845.556524] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3974e4ad-0b40-4485-a562-9a41a875d7bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.577731] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 845.577731] env[62109]: value = "task-1116585" [ 845.577731] env[62109]: _type = "Task" [ 845.577731] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.596912] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116585, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.618700] env[62109]: DEBUG nova.policy [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42dfa9389c91465cbd87a83310117faa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb674b9896af406aad0bc08bb8a63c72', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 845.889706] env[62109]: DEBUG nova.network.neutron [-] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.904828] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116582, 'name': CreateVM_Task, 'duration_secs': 0.590804} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.905735] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 845.907169] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.907169] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.907889] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 845.908841] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e306376-bdfe-4453-a21a-3136777cfb8e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.920856] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 845.920856] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5276da24-8d47-a41b-03bb-e7d584a15f50" [ 845.920856] env[62109]: _type = "Task" [ 845.920856] env[62109]: } to complete. 
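Annotation: the "Policy check for network:attach_external_network failed" line above is an oslo.policy decision made against the request's credential dict; a plain member without admin roles is simply not allowed to attach to external networks. A loose sketch of the kind of check involved — the enforcer setup and target are simplified placeholders, not Nova's actual policy wiring:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    creds = {'project_id': 'fb674b9896af406aad0bc08bb8a63c72',
             'roles': ['reader', 'member'], 'is_admin': False}
    # Evaluates to False for a non-admin member, matching the failed check above.
    allowed = enforcer.enforce('network:attach_external_network',
                               target={}, creds=creds)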
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.924101] env[62109]: DEBUG nova.objects.instance [None req-d18ea02c-27f0-4b3b-a1c3-2d50f383cfd1 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lazy-loading 'pci_requests' on Instance uuid 7afbb35b-9865-40a7-8b37-d6a661a186a9 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.938746] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5276da24-8d47-a41b-03bb-e7d584a15f50, 'name': SearchDatastore_Task, 'duration_secs': 0.012795} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.945825] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.946595] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.946595] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.946595] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.946718] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.948016] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-efcc512d-d5d3-48d8-b52d-26207e591f4c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.959259] env[62109]: DEBUG oslo_vmware.api [None req-aeb33e11-8935-4c35-a7d0-205b4093a741 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116583, 'name': PowerOffVM_Task, 'duration_secs': 0.224472} completed successfully. 
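Annotation: before copying the cached image, the driver makes sure the devstack-image-cache_base folder exists; MakeDirectory is a plain method rather than a task, so there is nothing to wait on afterwards. A short sketch of that call, with `dc_ref` again a placeholder Datacenter moref:

    file_manager = session.vim.service_content.fileManager
    session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                       name='[datastore1] devstack-image-cache_base',
                       datacenter=dc_ref,
                       createParentDirectories=True)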
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.959774] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeb33e11-8935-4c35-a7d0-205b4093a741 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 845.960112] env[62109]: DEBUG nova.compute.manager [None req-aeb33e11-8935-4c35-a7d0-205b4093a741 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 845.961555] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590e4656-b5e3-4635-9eb3-d386b74a8e09 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.966252] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.966641] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 845.968086] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b61523cb-10be-45f3-91be-7c28602b1017 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.979625] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 845.979625] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52379ab6-3966-3021-a467-01e596399443" [ 845.979625] env[62109]: _type = "Task" [ 845.979625] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.995066] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52379ab6-3966-3021-a467-01e596399443, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.003974] env[62109]: DEBUG nova.network.neutron [req-9563cbc5-a217-403c-9d98-409d56ba6c48 req-9de4da51-3ab6-47f5-b446-54354c3bc8c5 service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Updated VIF entry in instance network info cache for port a06370a9-effe-4205-85fa-bfa658250da0. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 846.004816] env[62109]: DEBUG nova.network.neutron [req-9563cbc5-a217-403c-9d98-409d56ba6c48 req-9de4da51-3ab6-47f5-b446-54354c3bc8c5 service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Updating instance_info_cache with network_info: [{"id": "a06370a9-effe-4205-85fa-bfa658250da0", "address": "fa:16:3e:3d:f3:6c", "network": {"id": "f5e70352-43f1-423d-8e31-44ae247ddba2", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-426993836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cddb4c7a9ba442d98d6cf4f3ab30ad71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa06370a9-ef", "ovs_interfaceid": "a06370a9-effe-4205-85fa-bfa658250da0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.015687] env[62109]: DEBUG nova.compute.manager [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 846.018366] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116584, 'name': Rename_Task, 'duration_secs': 0.215791} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.019196] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 846.019527] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-985da234-5f2d-49e6-9261-7b5b09d66877 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.029647] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 846.029647] env[62109]: value = "task-1116586" [ 846.029647] env[62109]: _type = "Task" [ 846.029647] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.043434] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116586, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.091632] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116585, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.265359] env[62109]: DEBUG nova.network.neutron [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Updated VIF entry in instance network info cache for port 70961368-3e7f-4c05-b619-fdb2dc044a77. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 846.265983] env[62109]: DEBUG nova.network.neutron [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Updating instance_info_cache with network_info: [{"id": "70961368-3e7f-4c05-b619-fdb2dc044a77", "address": "fa:16:3e:ed:cd:c5", "network": {"id": "99854997-f910-4858-a446-ecc6781e679e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1633145076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5435b5d2a57a47a9a087b0f466ed33b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70961368-3e", "ovs_interfaceid": "70961368-3e7f-4c05-b619-fdb2dc044a77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.321136] env[62109]: DEBUG nova.network.neutron [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Successfully created port: 484de316-0fb7-44a9-b071-2c442b1388ed {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 846.351352] env[62109]: DEBUG nova.network.neutron [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Port de4056dc-a527-43f0-ad81-f82e5cb00f86 binding to destination host cpu-1 is already ACTIVE {{(pid=62109) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 846.353377] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b10e6e49-5330-4870-a658-175db9ab3750 
tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.353377] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquired lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.353377] env[62109]: DEBUG nova.network.neutron [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 846.393670] env[62109]: INFO nova.compute.manager [-] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Took 1.39 seconds to deallocate network for instance. [ 846.429099] env[62109]: DEBUG nova.objects.base [None req-d18ea02c-27f0-4b3b-a1c3-2d50f383cfd1 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Object Instance<7afbb35b-9865-40a7-8b37-d6a661a186a9> lazy-loaded attributes: flavor,pci_requests {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 846.429159] env[62109]: DEBUG nova.network.neutron [None req-d18ea02c-27f0-4b3b-a1c3-2d50f383cfd1 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 846.437971] env[62109]: DEBUG nova.network.neutron [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Successfully updated port: 1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 846.485683] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aeb33e11-8935-4c35-a7d0-205b4093a741 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.585s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.499944] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52379ab6-3966-3021-a467-01e596399443, 'name': SearchDatastore_Task, 'duration_secs': 0.018696} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.501365] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f9dc282-48b7-4936-b88b-1712b7640374 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.513669] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 846.513669] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b57bf3-4382-a9c2-958d-24864c325a37" [ 846.513669] env[62109]: _type = "Task" [ 846.513669] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.522121] env[62109]: DEBUG oslo_concurrency.lockutils [req-9563cbc5-a217-403c-9d98-409d56ba6c48 req-9de4da51-3ab6-47f5-b446-54354c3bc8c5 service nova] Releasing lock "refresh_cache-7ace6356-1a81-4095-8286-c9b6d829062b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.541066] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b57bf3-4382-a9c2-958d-24864c325a37, 'name': SearchDatastore_Task, 'duration_secs': 0.011661} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.541811] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.545100] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] b1321874-8f97-4444-9f9c-d586d51a9e92/b1321874-8f97-4444-9f9c-d586d51a9e92.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 846.546786] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c73f5a77-d6b1-43e2-b161-e20b5df4b799 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.561052] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116586, 'name': PowerOnVM_Task} progress is 90%. 
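Annotation: with the cached VMDK located, the image is cloned into the instance's own directory with VirtualDiskManager.CopyVirtualDisk_Task and the task is polled like any other. A sketch using the source and destination paths from the log above; `dc_ref` remains a placeholder:

    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/'
                   '6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/'
                   '6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore1] b1321874-8f97-4444-9f9c-d586d51a9e92/'
                 'b1321874-8f97-4444-9f9c-d586d51a9e92.vmdk',
        destDatacenter=dc_ref)
    session.wait_for_task(task)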
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.575061] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 846.575061] env[62109]: value = "task-1116587" [ 846.575061] env[62109]: _type = "Task" [ 846.575061] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.593938] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116587, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.598041] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116585, 'name': CreateVM_Task, 'duration_secs': 0.561554} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.599335] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 846.600282] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.600583] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.601328] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 846.602027] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12de8224-c09f-40b8-bce0-479166eced55 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.611484] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 846.611484] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a4ada7-2a6d-d0c4-dc9e-db1bbde48656" [ 846.611484] env[62109]: _type = "Task" [ 846.611484] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.624959] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a4ada7-2a6d-d0c4-dc9e-db1bbde48656, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.629119] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1ccf6b-123f-42fe-98b9-5882f684a71a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.637693] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5701438-2295-4732-acba-a714e6e9a1fb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.676215] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1640058a-a084-42f2-ab65-d90aac3d56d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.686464] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0fd1de-5d46-4d2c-89fa-ae314af42119 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.692476] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d18ea02c-27f0-4b3b-a1c3-2d50f383cfd1 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-7afbb35b-9865-40a7-8b37-d6a661a186a9-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.274s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.710993] env[62109]: DEBUG nova.compute.provider_tree [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.768227] env[62109]: DEBUG oslo_concurrency.lockutils [req-bf3ef248-f84a-4344-acd9-625307e99658 req-dce0e4d9-2a7a-4ba3-8e0e-1c2b157fe9e3 service nova] Releasing lock "refresh_cache-b1321874-8f97-4444-9f9c-d586d51a9e92" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.901749] env[62109]: DEBUG oslo_concurrency.lockutils [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.946090] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "refresh_cache-d7d1029c-9b7c-4bd7-b606-a1962a129461" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.946728] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired lock "refresh_cache-d7d1029c-9b7c-4bd7-b606-a1962a129461" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.946949] env[62109]: DEBUG nova.network.neutron [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 847.032206] env[62109]: DEBUG nova.compute.manager [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 847.053490] env[62109]: DEBUG oslo_vmware.api [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116586, 'name': PowerOnVM_Task, 'duration_secs': 0.673041} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.054048] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 847.058033] env[62109]: INFO nova.compute.manager [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Took 9.36 seconds to spawn the instance on the hypervisor. 
[ 847.058033] env[62109]: DEBUG nova.compute.manager [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 847.062378] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6646424e-0d19-41dc-90b8-1b3d118f1304 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.083744] env[62109]: DEBUG nova.virt.hardware [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 847.084284] env[62109]: DEBUG nova.virt.hardware [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 847.084484] env[62109]: DEBUG nova.virt.hardware [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 847.084738] env[62109]: DEBUG nova.virt.hardware [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 847.085026] env[62109]: DEBUG nova.virt.hardware [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 847.085268] env[62109]: DEBUG nova.virt.hardware [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 847.085704] env[62109]: DEBUG nova.virt.hardware [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 847.085984] env[62109]: DEBUG nova.virt.hardware [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 847.086211] env[62109]: DEBUG nova.virt.hardware [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 847.086592] env[62109]: DEBUG nova.virt.hardware [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 847.086955] env[62109]: DEBUG nova.virt.hardware [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 847.093653] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8914344-a2f8-4085-9561-ffccc620f964 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.111440] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1699fa-d1c2-4511-82c1-169132256ec7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.117266] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116587, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.145948] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a4ada7-2a6d-d0c4-dc9e-db1bbde48656, 'name': SearchDatastore_Task, 'duration_secs': 0.026696} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.145948] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.145948] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 847.146269] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.146683] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.146937] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 847.147336] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8260c586-b1a0-43f1-8765-7ec0d431c779 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.157667] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 847.157966] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 847.159625] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7e5f1a9-46f9-4d6e-8e21-f92701cf3c26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.167663] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 847.167663] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528e16ee-ce29-7dde-4328-a858117e765a" [ 847.167663] env[62109]: _type = "Task" [ 847.167663] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.176735] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528e16ee-ce29-7dde-4328-a858117e765a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.177934] env[62109]: DEBUG nova.network.neutron [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance_info_cache with network_info: [{"id": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "address": "fa:16:3e:f0:07:2b", "network": {"id": "4fcb7814-4de2-430d-af81-55131504c5bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2d2be1e2322b4c87945fff0cd79d3c7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde4056dc-a5", "ovs_interfaceid": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.213744] env[62109]: DEBUG nova.scheduler.client.report [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 847.491425] env[62109]: DEBUG nova.network.neutron [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 847.596674] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116587, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554941} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.597305] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] b1321874-8f97-4444-9f9c-d586d51a9e92/b1321874-8f97-4444-9f9c-d586d51a9e92.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 847.597739] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.601867] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-286bb474-f598-45b6-99e2-1032e8470865 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.606848] env[62109]: INFO nova.compute.manager [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Took 47.24 seconds to build instance. [ 847.614665] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 847.614665] env[62109]: value = "task-1116588" [ 847.614665] env[62109]: _type = "Task" [ 847.614665] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.628111] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116588, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.660565] env[62109]: DEBUG nova.network.neutron [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Updating instance_info_cache with network_info: [{"id": "1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5", "address": "fa:16:3e:ff:31:48", "network": {"id": "99854997-f910-4858-a446-ecc6781e679e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1633145076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5435b5d2a57a47a9a087b0f466ed33b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d0bfd82-ef", "ovs_interfaceid": "1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.681832] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Releasing lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.684117] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528e16ee-ce29-7dde-4328-a858117e765a, 'name': SearchDatastore_Task, 'duration_secs': 0.009629} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.685099] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13cf0123-36c0-42ff-acb8-39bc8f24604a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.693037] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 847.693037] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52731f7b-4a28-d267-c4ab-9f323cec5590" [ 847.693037] env[62109]: _type = "Task" [ 847.693037] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.704122] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52731f7b-4a28-d267-c4ab-9f323cec5590, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.719505] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.730s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.720119] env[62109]: DEBUG nova.compute.manager [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 847.722912] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.130s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.723178] env[62109]: DEBUG nova.objects.instance [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lazy-loading 'resources' on Instance uuid 59f6adc7-d491-4a86-83f7-89128511e00f {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 847.949729] env[62109]: DEBUG nova.compute.manager [req-ecb0f473-dddb-444f-84cd-a8254285960b req-195445a9-c8f8-4644-8a78-e544e1162489 service nova] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Received event network-vif-deleted-c53b0b3c-0c89-4d69-b2f7-0e56f6351f7a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 847.964785] env[62109]: DEBUG nova.compute.manager [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a req-2a608300-4594-4541-97f6-44e2715b8610 service nova] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Received event network-vif-plugged-1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 847.964785] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a req-2a608300-4594-4541-97f6-44e2715b8610 service nova] Acquiring lock "d7d1029c-9b7c-4bd7-b606-a1962a129461-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.965081] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a req-2a608300-4594-4541-97f6-44e2715b8610 service nova] Lock "d7d1029c-9b7c-4bd7-b606-a1962a129461-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.965185] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a req-2a608300-4594-4541-97f6-44e2715b8610 service nova] Lock "d7d1029c-9b7c-4bd7-b606-a1962a129461-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.965386] env[62109]: DEBUG nova.compute.manager [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a req-2a608300-4594-4541-97f6-44e2715b8610 service nova] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] No waiting events found dispatching network-vif-plugged-1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 847.965686] env[62109]: WARNING nova.compute.manager [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a req-2a608300-4594-4541-97f6-44e2715b8610 service nova] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Received unexpected event network-vif-plugged-1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5 for instance with vm_state building and task_state spawning. [ 847.965869] env[62109]: DEBUG nova.compute.manager [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a req-2a608300-4594-4541-97f6-44e2715b8610 service nova] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Received event network-changed-1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 847.966021] env[62109]: DEBUG nova.compute.manager [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a req-2a608300-4594-4541-97f6-44e2715b8610 service nova] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Refreshing instance network info cache due to event network-changed-1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 847.966247] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a req-2a608300-4594-4541-97f6-44e2715b8610 service nova] Acquiring lock "refresh_cache-d7d1029c-9b7c-4bd7-b606-a1962a129461" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.103586] env[62109]: DEBUG nova.network.neutron [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Successfully updated port: 484de316-0fb7-44a9-b071-2c442b1388ed {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 848.110100] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c306f0a5-8827-4f34-b2e9-7184d93ff70b tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.754s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.133114] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116588, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.204111} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.133450] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 848.134251] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0fc0e6-d87b-42c1-97c2-8bd06c575d48 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.164277] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] b1321874-8f97-4444-9f9c-d586d51a9e92/b1321874-8f97-4444-9f9c-d586d51a9e92.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 848.164277] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Releasing lock "refresh_cache-d7d1029c-9b7c-4bd7-b606-a1962a129461" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.164468] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Instance network_info: |[{"id": "1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5", "address": "fa:16:3e:ff:31:48", "network": {"id": "99854997-f910-4858-a446-ecc6781e679e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1633145076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5435b5d2a57a47a9a087b0f466ed33b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d0bfd82-ef", "ovs_interfaceid": "1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 848.164468] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b77bd49-a390-49c1-b2a1-7d3d338cee47 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.183069] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a 
req-2a608300-4594-4541-97f6-44e2715b8610 service nova] Acquired lock "refresh_cache-d7d1029c-9b7c-4bd7-b606-a1962a129461" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.183374] env[62109]: DEBUG nova.network.neutron [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a req-2a608300-4594-4541-97f6-44e2715b8610 service nova] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Refreshing network info cache for port 1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 848.184735] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:31:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f85835c8-5d0c-4b2f-97c4-6c4006580f79', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 848.193351] env[62109]: DEBUG oslo.service.loopingcall [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 848.194939] env[62109]: DEBUG nova.compute.manager [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62109) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:897}} [ 848.195158] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.195516] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 848.200392] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0abd69f2-cbd3-4f4a-b590-c6bbfcf53aa9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.219257] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 848.219257] env[62109]: value = "task-1116589" [ 848.219257] env[62109]: _type = "Task" [ 848.219257] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.227546] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52731f7b-4a28-d267-c4ab-9f323cec5590, 'name': SearchDatastore_Task, 'duration_secs': 0.011592} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.229999] env[62109]: DEBUG nova.compute.utils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 848.232106] env[62109]: DEBUG nova.objects.instance [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lazy-loading 'numa_topology' on Instance uuid 59f6adc7-d491-4a86-83f7-89128511e00f {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 848.236021] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.236021] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 5d656f91-d35f-45e1-8892-7cdacd306960/5d656f91-d35f-45e1-8892-7cdacd306960.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 848.236021] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 848.236021] env[62109]: value = "task-1116590" [ 848.236021] env[62109]: _type = "Task" [ 848.236021] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.237471] env[62109]: DEBUG nova.compute.manager [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 848.237688] env[62109]: DEBUG nova.network.neutron [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 848.241725] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b87fdf7a-584d-4f31-ad54-935ffdb7d1a1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.242621] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116589, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.257472] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116590, 'name': CreateVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.259326] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 848.259326] env[62109]: value = "task-1116591" [ 848.259326] env[62109]: _type = "Task" [ 848.259326] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.267983] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116591, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.302156] env[62109]: DEBUG nova.policy [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2988618e18934aa6b85d2ea288917ad3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '275238e3083540aa838de6d5cccf61eb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 848.608579] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "refresh_cache-0f197e98-9630-4928-8707-56bbf6c1e5a1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.609017] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquired lock "refresh_cache-0f197e98-9630-4928-8707-56bbf6c1e5a1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.609078] env[62109]: DEBUG nova.network.neutron [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 848.706444] env[62109]: DEBUG nova.network.neutron [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Successfully created port: 0b15c050-09d2-478d-b46e-797a5ff6bd05 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 848.734310] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116589, 'name': ReconfigVM_Task, 'duration_secs': 0.471149} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.734626] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Reconfigured VM instance instance-00000042 to attach disk [datastore1] b1321874-8f97-4444-9f9c-d586d51a9e92/b1321874-8f97-4444-9f9c-d586d51a9e92.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.735764] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06435c46-27d4-4a6e-bb39-7d7b3c9099b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.738509] env[62109]: DEBUG nova.compute.manager [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 848.746388] env[62109]: DEBUG nova.objects.base [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Object Instance<59f6adc7-d491-4a86-83f7-89128511e00f> lazy-loaded attributes: resources,numa_topology {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 848.762213] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 848.762213] env[62109]: value = "task-1116592" [ 848.762213] env[62109]: _type = "Task" [ 848.762213] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.773627] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116590, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.777854] env[62109]: INFO nova.compute.manager [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Rescuing [ 848.778139] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "refresh_cache-58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.778350] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "refresh_cache-58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.778611] env[62109]: DEBUG nova.network.neutron [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 848.788324] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116591, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.796117] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116592, 'name': Rename_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.032066] env[62109]: DEBUG nova.objects.instance [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'flavor' on Instance uuid 1399f618-3a93-4731-a59b-f98306d6cd52 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.069030] env[62109]: DEBUG oslo_concurrency.lockutils [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "interface-7afbb35b-9865-40a7-8b37-d6a661a186a9-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.069465] env[62109]: DEBUG oslo_concurrency.lockutils [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-7afbb35b-9865-40a7-8b37-d6a661a186a9-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.069629] env[62109]: DEBUG nova.objects.instance [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lazy-loading 'flavor' on Instance uuid 7afbb35b-9865-40a7-8b37-d6a661a186a9 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.111773] env[62109]: DEBUG nova.network.neutron [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a req-2a608300-4594-4541-97f6-44e2715b8610 service nova] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Updated VIF entry in instance network info cache for port 1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 849.112794] env[62109]: DEBUG nova.network.neutron [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a req-2a608300-4594-4541-97f6-44e2715b8610 service nova] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Updating instance_info_cache with network_info: [{"id": "1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5", "address": "fa:16:3e:ff:31:48", "network": {"id": "99854997-f910-4858-a446-ecc6781e679e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1633145076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5435b5d2a57a47a9a087b0f466ed33b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d0bfd82-ef", "ovs_interfaceid": "1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.128021] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Acquiring lock "6f31405e-a766-46da-8bf9-7be37a323bf3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.128021] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Lock "6f31405e-a766-46da-8bf9-7be37a323bf3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.164673] env[62109]: DEBUG nova.network.neutron [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 849.238604] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6225137b-1b89-45b7-bc87-00fce9c7d061 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.247842] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c822b02-1691-4832-b912-fe95155fd24b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.300964] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116590, 'name': CreateVM_Task, 'duration_secs': 0.624158} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.307279] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987ff0ad-d816-4007-baf1-c2ad3643b160 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.310195] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 849.314612] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.314827] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.315175] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 849.315767] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116591, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569551} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.316683] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ffe555c-f982-4669-b06e-41a32cf1efba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.318647] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 5d656f91-d35f-45e1-8892-7cdacd306960/5d656f91-d35f-45e1-8892-7cdacd306960.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 849.318868] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 849.327358] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc191b24-12c6-439e-8c7e-33b2f4405a80 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.330363] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116592, 'name': Rename_Task, 'duration_secs': 0.28943} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.331144] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 849.332507] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c019ca4-444d-4fa3-b5ae-d5552fe3052d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.338553] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a134d3e-f231-45c3-8f1f-37653d30cb55 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.341334] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 849.341334] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52abe9f8-0d9b-d2b0-2b0d-1a958b7f7d74" [ 849.341334] env[62109]: _type = "Task" [ 849.341334] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.358265] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 849.358265] env[62109]: value = "task-1116594" [ 849.358265] env[62109]: _type = "Task" [ 849.358265] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.358694] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 849.358694] env[62109]: value = "task-1116593" [ 849.358694] env[62109]: _type = "Task" [ 849.358694] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.359682] env[62109]: DEBUG nova.compute.provider_tree [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.375755] env[62109]: DEBUG nova.network.neutron [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Updating instance_info_cache with network_info: [{"id": "484de316-0fb7-44a9-b071-2c442b1388ed", "address": "fa:16:3e:a8:8c:84", "network": {"id": "35f9f3e9-a618-43e3-ac76-30778c802026", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-152247776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb674b9896af406aad0bc08bb8a63c72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91b0f7e5-0d1a-46e2-bf73-09656211dea2", "external-id": "nsx-vlan-transportzone-488", "segmentation_id": 488, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap484de316-0f", "ovs_interfaceid": "484de316-0fb7-44a9-b071-2c442b1388ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.381024] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52abe9f8-0d9b-d2b0-2b0d-1a958b7f7d74, 'name': SearchDatastore_Task, 'duration_secs': 0.011333} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.383068] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.383374] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 849.383657] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.383964] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.384247] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 849.390667] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45a838d9-d579-41c8-8b1a-9d20d29667d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.392887] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116593, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.398008] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116594, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.408018] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 849.408304] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 849.409198] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b40349b3-9259-4235-b726-53a2e0fe53fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.418462] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 849.418462] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a58816-f02a-7de4-0055-a9ec0d0cec23" [ 849.418462] env[62109]: _type = "Task" [ 849.418462] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.428459] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a58816-f02a-7de4-0055-a9ec0d0cec23, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.541479] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.541722] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquired lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.541908] env[62109]: DEBUG nova.network.neutron [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 849.542126] env[62109]: DEBUG nova.objects.instance [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'info_cache' on Instance uuid 1399f618-3a93-4731-a59b-f98306d6cd52 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.620694] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d645f86-2f7d-43c2-9065-02ee2b254a6a req-2a608300-4594-4541-97f6-44e2715b8610 service nova] Releasing lock "refresh_cache-d7d1029c-9b7c-4bd7-b606-a1962a129461" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.630451] env[62109]: DEBUG nova.compute.manager [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 849.659463] env[62109]: DEBUG nova.objects.instance [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lazy-loading 'pci_requests' on Instance uuid 7afbb35b-9865-40a7-8b37-d6a661a186a9 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.690867] env[62109]: DEBUG nova.network.neutron [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Updating instance_info_cache with network_info: [{"id": "7448d73b-2d36-46d9-9f1f-3ed3ede34226", "address": "fa:16:3e:7b:84:90", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7448d73b-2d", "ovs_interfaceid": "7448d73b-2d36-46d9-9f1f-3ed3ede34226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.763636] env[62109]: DEBUG nova.compute.manager [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 849.788070] env[62109]: DEBUG nova.virt.hardware [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 849.788294] env[62109]: DEBUG nova.virt.hardware [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 849.788529] env[62109]: DEBUG nova.virt.hardware [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 849.788777] env[62109]: DEBUG nova.virt.hardware [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 849.788965] env[62109]: DEBUG nova.virt.hardware [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 849.789204] env[62109]: DEBUG nova.virt.hardware [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 849.789503] env[62109]: DEBUG nova.virt.hardware [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 849.789729] env[62109]: DEBUG nova.virt.hardware [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 849.789967] env[62109]: DEBUG nova.virt.hardware [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] 
Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 849.790255] env[62109]: DEBUG nova.virt.hardware [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 849.790685] env[62109]: DEBUG nova.virt.hardware [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 849.791777] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ddeaee-a245-4123-ae38-6d2f6e935e01 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.802963] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3c8025-6332-4097-a843-a0899d4c0a23 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.864729] env[62109]: DEBUG nova.scheduler.client.report [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 849.878746] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Releasing lock "refresh_cache-0f197e98-9630-4928-8707-56bbf6c1e5a1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.879169] env[62109]: DEBUG nova.compute.manager [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Instance network_info: |[{"id": "484de316-0fb7-44a9-b071-2c442b1388ed", "address": "fa:16:3e:a8:8c:84", "network": {"id": "35f9f3e9-a618-43e3-ac76-30778c802026", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-152247776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb674b9896af406aad0bc08bb8a63c72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"91b0f7e5-0d1a-46e2-bf73-09656211dea2", "external-id": "nsx-vlan-transportzone-488", "segmentation_id": 488, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap484de316-0f", "ovs_interfaceid": "484de316-0fb7-44a9-b071-2c442b1388ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 849.883396] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:8c:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91b0f7e5-0d1a-46e2-bf73-09656211dea2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '484de316-0fb7-44a9-b071-2c442b1388ed', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 849.893230] env[62109]: DEBUG oslo.service.loopingcall [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 849.893541] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116593, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079944} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.898497] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 849.898784] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 849.899093] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116594, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.899325] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-003f2e63-ca9c-4337-9583-648cf0e8fc19 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.915030] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba8aea7-b941-4cad-9ff7-bacd8a650044 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.942100] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] 5d656f91-d35f-45e1-8892-7cdacd306960/5d656f91-d35f-45e1-8892-7cdacd306960.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 849.944413] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ecf94b5-2c97-4dc2-8970-a780f80a1a0f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.958870] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 849.958870] env[62109]: value = "task-1116595" [ 849.958870] env[62109]: _type = "Task" [ 849.958870] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.963374] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a58816-f02a-7de4-0055-a9ec0d0cec23, 'name': SearchDatastore_Task, 'duration_secs': 0.011399} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.967570] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0eccfac9-3281-4661-b5c0-0dccc8d6026c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.972334] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 849.972334] env[62109]: value = "task-1116596" [ 849.972334] env[62109]: _type = "Task" [ 849.972334] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.980617] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116595, 'name': CreateVM_Task} progress is 15%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.980975] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 849.980975] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527761a5-264f-a714-6e5d-e8f3100e369c" [ 849.980975] env[62109]: _type = "Task" [ 849.980975] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.988009] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116596, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.994386] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527761a5-264f-a714-6e5d-e8f3100e369c, 'name': SearchDatastore_Task, 'duration_secs': 0.011225} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.994654] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.994917] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] d7d1029c-9b7c-4bd7-b606-a1962a129461/d7d1029c-9b7c-4bd7-b606-a1962a129461.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 849.995256] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97588e18-1fa7-45de-8419-428f5678f91d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.004207] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 850.004207] env[62109]: value = "task-1116597" [ 850.004207] env[62109]: _type = "Task" [ 850.004207] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.016142] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116597, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.045923] env[62109]: DEBUG nova.objects.base [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Object Instance<1399f618-3a93-4731-a59b-f98306d6cd52> lazy-loaded attributes: flavor,info_cache {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 850.160314] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.162100] env[62109]: DEBUG nova.objects.base [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Object Instance<7afbb35b-9865-40a7-8b37-d6a661a186a9> lazy-loaded attributes: flavor,pci_requests {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 850.162402] env[62109]: DEBUG nova.network.neutron [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 850.193994] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "refresh_cache-58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.221762] env[62109]: DEBUG nova.policy [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491fd4e791924dafb155dd356bf20aa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6ee24c114bd495e8f29eeda1f6b8bba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 850.376778] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116594, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.378075] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.655s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.381732] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.625s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.476979] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116595, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.490157] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116596, 'name': ReconfigVM_Task, 'duration_secs': 0.48133} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.491048] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Reconfigured VM instance instance-0000003e to attach disk [datastore2] 5d656f91-d35f-45e1-8892-7cdacd306960/5d656f91-d35f-45e1-8892-7cdacd306960.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 850.491713] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f8b7974-57ac-46b4-91f5-bfab142462a7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.503262] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 850.503262] env[62109]: value = "task-1116598" [ 850.503262] env[62109]: _type = "Task" [ 850.503262] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.523650] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116597, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.532062] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116598, 'name': Rename_Task} progress is 10%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.558409] env[62109]: DEBUG nova.network.neutron [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Successfully updated port: 0b15c050-09d2-478d-b46e-797a5ff6bd05 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 850.710016] env[62109]: DEBUG nova.network.neutron [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Successfully created port: 9b48845a-ae70-493f-8ea7-542088d62859 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 850.730045] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 850.730570] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aefc7c7b-ed81-4e58-aac7-86da62e8da59 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.738982] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 850.738982] env[62109]: value = "task-1116599" [ 850.738982] env[62109]: _type = "Task" [ 850.738982] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.754826] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116599, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.827368] env[62109]: DEBUG nova.network.neutron [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Updating instance_info_cache with network_info: [{"id": "76f15b7e-4103-4568-8042-248ee15513dc", "address": "fa:16:3e:81:0c:19", "network": {"id": "66a020c3-cdbc-464e-83aa-02e9126e8492", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1240081161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bff923ccb02449aa834523a0652cbdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f15b7e-41", "ovs_interfaceid": "76f15b7e-4103-4568-8042-248ee15513dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.876550] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116594, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.880566] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6fe409b-7d98-4ede-9655-484d04609781 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.889652] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857bd357-4d5b-4fc7-bbd8-f6127f331511 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.895184] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b907da22-0a48-441e-b735-8a58c10ed7a6 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "59f6adc7-d491-4a86-83f7-89128511e00f" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 46.579s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.896207] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5c8b11ca-0f38-4606-a1d8-50fdea85bd9d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "59f6adc7-d491-4a86-83f7-89128511e00f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 27.387s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.896895] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5c8b11ca-0f38-4606-a1d8-50fdea85bd9d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "59f6adc7-d491-4a86-83f7-89128511e00f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.896895] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5c8b11ca-0f38-4606-a1d8-50fdea85bd9d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "59f6adc7-d491-4a86-83f7-89128511e00f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.896895] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5c8b11ca-0f38-4606-a1d8-50fdea85bd9d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "59f6adc7-d491-4a86-83f7-89128511e00f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.927665] env[62109]: INFO nova.compute.manager [None req-5c8b11ca-0f38-4606-a1d8-50fdea85bd9d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Terminating instance [ 850.930634] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57071ae7-a48b-4307-90c1-40530b0c5d7c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.933243] env[62109]: DEBUG nova.compute.manager [None 
req-5c8b11ca-0f38-4606-a1d8-50fdea85bd9d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 850.933243] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5c8b11ca-0f38-4606-a1d8-50fdea85bd9d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 850.933461] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-847c017e-c5d9-4a95-b58d-d1d8bd143ac5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.942676] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e11487-5d78-4658-a2d5-a5da349a5bc0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.949549] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368412f7-1bdf-48a2-9235-b49d51e98d34 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.972074] env[62109]: DEBUG nova.compute.provider_tree [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.988994] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-5c8b11ca-0f38-4606-a1d8-50fdea85bd9d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 59f6adc7-d491-4a86-83f7-89128511e00f could not be found. [ 850.989217] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5c8b11ca-0f38-4606-a1d8-50fdea85bd9d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 850.989406] env[62109]: INFO nova.compute.manager [None req-5c8b11ca-0f38-4606-a1d8-50fdea85bd9d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Took 0.06 seconds to destroy the instance on the hypervisor. [ 850.989685] env[62109]: DEBUG oslo.service.loopingcall [None req-5c8b11ca-0f38-4606-a1d8-50fdea85bd9d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.990687] env[62109]: DEBUG nova.scheduler.client.report [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 850.997031] env[62109]: DEBUG nova.compute.manager [-] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 850.997031] env[62109]: DEBUG nova.network.neutron [-] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 851.004612] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116595, 'name': CreateVM_Task, 'duration_secs': 0.693551} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.008728] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 851.009377] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.009490] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.009759] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 851.010965] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88dcc423-2ef8-4f3b-99cb-d1b018b37aff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.019753] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116598, 'name': Rename_Task, 'duration_secs': 0.211502} completed 
successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.021514] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 851.021862] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 851.021862] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fbb2c7-70da-1a72-bc14-561d6946a2d3" [ 851.021862] env[62109]: _type = "Task" [ 851.021862] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.022074] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31864d81-c06a-4a72-980c-fa53bc7eb42e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.030182] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116597, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.60704} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.030835] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] d7d1029c-9b7c-4bd7-b606-a1962a129461/d7d1029c-9b7c-4bd7-b606-a1962a129461.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 851.031083] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 851.031641] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81f197f7-87a5-433e-9ae7-716d5f651aa9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.037100] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fbb2c7-70da-1a72-bc14-561d6946a2d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.038507] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 851.038507] env[62109]: value = "task-1116600" [ 851.038507] env[62109]: _type = "Task" [ 851.038507] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.044559] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 851.044559] env[62109]: value = "task-1116601" [ 851.044559] env[62109]: _type = "Task" [ 851.044559] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.064018] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116601, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.064447] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116600, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.064779] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "refresh_cache-0392a352-74e5-4551-9319-eebbc5e20d3b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.064895] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "refresh_cache-0392a352-74e5-4551-9319-eebbc5e20d3b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.065068] env[62109]: DEBUG nova.network.neutron [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 851.144525] env[62109]: DEBUG nova.compute.manager [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Received event network-vif-plugged-484de316-0fb7-44a9-b071-2c442b1388ed {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 851.144772] env[62109]: DEBUG oslo_concurrency.lockutils [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] Acquiring lock "0f197e98-9630-4928-8707-56bbf6c1e5a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.145021] env[62109]: DEBUG oslo_concurrency.lockutils [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] Lock "0f197e98-9630-4928-8707-56bbf6c1e5a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.145182] env[62109]: DEBUG oslo_concurrency.lockutils [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] Lock "0f197e98-9630-4928-8707-56bbf6c1e5a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.145368] env[62109]: DEBUG nova.compute.manager [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] No waiting events found dispatching network-vif-plugged-484de316-0fb7-44a9-b071-2c442b1388ed {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 851.145628] env[62109]: WARNING nova.compute.manager [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Received unexpected event network-vif-plugged-484de316-0fb7-44a9-b071-2c442b1388ed for instance with vm_state building and task_state spawning. [ 851.145813] env[62109]: DEBUG nova.compute.manager [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Received event network-changed-484de316-0fb7-44a9-b071-2c442b1388ed {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 851.145973] env[62109]: DEBUG nova.compute.manager [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Refreshing instance network info cache due to event network-changed-484de316-0fb7-44a9-b071-2c442b1388ed. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 851.146605] env[62109]: DEBUG oslo_concurrency.lockutils [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] Acquiring lock "refresh_cache-0f197e98-9630-4928-8707-56bbf6c1e5a1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.146765] env[62109]: DEBUG oslo_concurrency.lockutils [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] Acquired lock "refresh_cache-0f197e98-9630-4928-8707-56bbf6c1e5a1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.146935] env[62109]: DEBUG nova.network.neutron [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Refreshing network info cache for port 484de316-0fb7-44a9-b071-2c442b1388ed {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 851.253407] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116599, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.332214] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Releasing lock "refresh_cache-1399f618-3a93-4731-a59b-f98306d6cd52" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.374919] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116594, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.498039] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.116s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.498039] env[62109]: INFO nova.compute.manager [None req-53f96677-3f95-4475-8974-62a2d1185e64 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Successfully reverted task state from rebuilding on failure for instance. 
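The "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully ... duration_secs" markers that recur throughout this log all come from oslo.vmware's task polling around wait_for_task/_poll_task. The loop below is only a simplified, generic sketch of that pattern, not oslo.vmware's actual code; get_task_info is a hypothetical callable standing in for the PropertyCollector read of the Task object, assumed to expose state, progress and error attributes.

import time

class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""

def wait_for_task(get_task_info, poll_interval=0.5):
    # Generic poll-until-done loop mirroring the log lines above:
    # "progress is N%" while the task runs, "completed successfully"
    # with a duration once it reaches the success state.
    start = time.monotonic()
    while True:
        info = get_task_info()           # hypothetical task-state reader
        if info.state == 'success':
            duration = time.monotonic() - start
            print("completed successfully in %.3fs" % duration)
            return info
        if info.state == 'error':
            raise TaskFailed(info.error)
        print("progress is %s%%" % (info.progress or 0))
        time.sleep(poll_interval)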
[ 851.504234] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.329s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.504583] env[62109]: DEBUG nova.objects.instance [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Lazy-loading 'resources' on Instance uuid a24f2349-7c1b-441d-a36e-b16dd61f6031 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 851.536033] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fbb2c7-70da-1a72-bc14-561d6946a2d3, 'name': SearchDatastore_Task, 'duration_secs': 0.039106} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.536033] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.536262] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 851.536571] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.536769] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.536999] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 851.537355] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-068c84f6-55b8-4c24-98b2-6ac809b5a9c4 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.549914] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116600, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.554589] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 851.554845] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 851.555670] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62c5333b-524d-4b3e-90a8-b2fa4f8897bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.561717] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 851.561717] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524df489-f6ed-2a63-ecf7-2488bfec45a2" [ 851.561717] env[62109]: _type = "Task" [ 851.561717] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.565084] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116601, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.2983} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.568319] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 851.570981] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ce4a54-4013-45b7-af5d-e8612f5c12fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.579809] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524df489-f6ed-2a63-ecf7-2488bfec45a2, 'name': SearchDatastore_Task, 'duration_secs': 0.010223} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.600087] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] d7d1029c-9b7c-4bd7-b606-a1962a129461/d7d1029c-9b7c-4bd7-b606-a1962a129461.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 851.601067] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dad495e-4865-4584-84b0-6cdc56b824e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.603126] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f6f7b9a-42ab-4fe9-976a-2c8e0baccc14 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.621209] env[62109]: DEBUG nova.network.neutron [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 851.628336] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 851.628336] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529a815d-6ae2-779f-d0f4-7b357593a607" [ 851.628336] env[62109]: _type = "Task" [ 851.628336] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.634507] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 851.634507] env[62109]: value = "task-1116602" [ 851.634507] env[62109]: _type = "Task" [ 851.634507] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.642975] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529a815d-6ae2-779f-d0f4-7b357593a607, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.654373] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116602, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.742055] env[62109]: DEBUG nova.network.neutron [-] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.761325] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116599, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.835376] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 851.836140] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1d2f6b6-4c12-4a23-b095-a22cce2633a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.853418] env[62109]: DEBUG oslo_vmware.api [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 851.853418] env[62109]: value = "task-1116603" [ 851.853418] env[62109]: _type = "Task" [ 851.853418] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.865359] env[62109]: DEBUG oslo_vmware.api [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116603, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.877291] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116594, 'name': PowerOnVM_Task, 'duration_secs': 2.455555} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.877291] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 851.877291] env[62109]: INFO nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Took 10.30 seconds to spawn the instance on the hypervisor. 
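Taken together, the entries for instance d7d1029c-9b7c-4bd7-b606-a1962a129461 in this stretch of the log trace the usual VMware spawn sequence: copy the cached image VMDK out of devstack-image-cache_base, extend the root disk, reconfigure the VM to attach it, rename the VM, then power it on. The sketch below only condenses that ordering for orientation; every function in it is a hypothetical stand-in for the vSphere task named in its output, not Nova's real vmwareapi driver API.

def _run_task(description):
    # Stand-in for submitting a vSphere task and blocking on it the way
    # wait_for_task does; here it just records the step.
    print("%s completed successfully" % description)

def spawn_from_image_cache(instance_uuid, image_id, datastore="datastore2"):
    cache = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)
    root = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)
    _run_task("CopyVirtualDisk_Task %s -> %s" % (cache, root))
    _run_task("ExtendVirtualDisk_Task (extend root virtual disk to 1048576)")
    _run_task("ReconfigVM_Task (attach %s with type sparse)" % root)
    _run_task("Rename_Task")
    _run_task("PowerOnVM_Task")

spawn_from_image_cache("d7d1029c-9b7c-4bd7-b606-a1962a129461",
                       "6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8")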
[ 851.877291] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 851.877291] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436f0399-dd62-49de-b9fa-2aba8cbc73c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.939090] env[62109]: DEBUG nova.network.neutron [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Updating instance_info_cache with network_info: [{"id": "0b15c050-09d2-478d-b46e-797a5ff6bd05", "address": "fa:16:3e:8c:ee:2d", "network": {"id": "9b805542-3ae8-423b-9b1d-70116ea546bb", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1529116057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275238e3083540aa838de6d5cccf61eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b15c050-09", "ovs_interfaceid": "0b15c050-09d2-478d-b46e-797a5ff6bd05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.019722] env[62109]: DEBUG nova.network.neutron [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Updated VIF entry in instance network info cache for port 484de316-0fb7-44a9-b071-2c442b1388ed. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 852.019994] env[62109]: DEBUG nova.network.neutron [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Updating instance_info_cache with network_info: [{"id": "484de316-0fb7-44a9-b071-2c442b1388ed", "address": "fa:16:3e:a8:8c:84", "network": {"id": "35f9f3e9-a618-43e3-ac76-30778c802026", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-152247776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb674b9896af406aad0bc08bb8a63c72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91b0f7e5-0d1a-46e2-bf73-09656211dea2", "external-id": "nsx-vlan-transportzone-488", "segmentation_id": 488, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap484de316-0f", "ovs_interfaceid": "484de316-0fb7-44a9-b071-2c442b1388ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.050855] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116600, 'name': PowerOnVM_Task, 'duration_secs': 0.90106} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.051206] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 852.051523] env[62109]: DEBUG nova.compute.manager [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 852.052223] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b6a85a-5673-4b89-b1d3-1b3bcb17a566 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.143366] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529a815d-6ae2-779f-d0f4-7b357593a607, 'name': SearchDatastore_Task, 'duration_secs': 0.013461} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.146564] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.146908] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 0f197e98-9630-4928-8707-56bbf6c1e5a1/0f197e98-9630-4928-8707-56bbf6c1e5a1.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 852.148389] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f9a47d3-64fd-4a73-9034-6ded19f8ab88 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.153493] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116602, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.158756] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 852.158756] env[62109]: value = "task-1116604" [ 852.158756] env[62109]: _type = "Task" [ 852.158756] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.172310] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116604, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.251570] env[62109]: INFO nova.compute.manager [-] [instance: 59f6adc7-d491-4a86-83f7-89128511e00f] Took 1.26 seconds to deallocate network for instance. [ 852.258396] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116599, 'name': PowerOffVM_Task, 'duration_secs': 1.151206} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.261614] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 852.262591] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef846e7-fbd1-4634-bf0e-4928f5ae281d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.285454] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796a888f-9913-437b-ac71-195d0ee1ae23 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.324263] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 852.324263] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee419c12-b343-4069-bf3a-ffeaa8bf2fe8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.331750] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 852.331750] env[62109]: value = "task-1116605" [ 852.331750] env[62109]: _type = "Task" [ 852.331750] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.344523] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 852.344604] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 852.344871] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.345326] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.345326] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 852.345533] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c4c6f3b-bfb1-4be3-b917-bf979acbfcc7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.367843] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 852.368047] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 852.368816] env[62109]: DEBUG oslo_vmware.api [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116603, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.369297] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b9e7ead-f103-4dc2-b96a-83038a900005 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.375511] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 852.375511] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528fd7ee-def0-596e-277a-d50c19940e5f" [ 852.375511] env[62109]: _type = "Task" [ 852.375511] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.388414] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528fd7ee-def0-596e-277a-d50c19940e5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.405302] env[62109]: INFO nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Took 43.42 seconds to build instance. [ 852.443750] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "refresh_cache-0392a352-74e5-4551-9319-eebbc5e20d3b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.443750] env[62109]: DEBUG nova.compute.manager [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Instance network_info: |[{"id": "0b15c050-09d2-478d-b46e-797a5ff6bd05", "address": "fa:16:3e:8c:ee:2d", "network": {"id": "9b805542-3ae8-423b-9b1d-70116ea546bb", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1529116057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275238e3083540aa838de6d5cccf61eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b15c050-09", "ovs_interfaceid": "0b15c050-09d2-478d-b46e-797a5ff6bd05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 852.444902] 
env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:ee:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b15c050-09d2-478d-b46e-797a5ff6bd05', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 852.453079] env[62109]: DEBUG oslo.service.loopingcall [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 852.453377] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 852.453618] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d6f9a00-6f25-4c28-84f3-d8b84095d571 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.483111] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 852.483111] env[62109]: value = "task-1116606" [ 852.483111] env[62109]: _type = "Task" [ 852.483111] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.496988] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116606, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.502813] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e59e7e-452b-4eab-9182-155550aa6ca9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.513069] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04beaef7-00c6-4fb3-8acb-bb9578342cc8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.555795] env[62109]: DEBUG oslo_concurrency.lockutils [req-968b12ea-a54b-4584-a0dd-b5ed95bd2195 req-78d3df2a-120e-416b-874c-f92d3aaea3c3 service nova] Releasing lock "refresh_cache-0f197e98-9630-4928-8707-56bbf6c1e5a1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.557616] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3225f117-6efc-4b3d-8b46-dacdec57131f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.565304] env[62109]: INFO nova.compute.manager [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] bringing vm to original state: 'stopped' [ 852.574330] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7e4a75-cf06-4c33-9425-c6200ccb67b6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.594885] env[62109]: DEBUG nova.compute.provider_tree [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.649699] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116602, 'name': ReconfigVM_Task, 'duration_secs': 0.539708} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.650123] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Reconfigured VM instance instance-00000043 to attach disk [datastore2] d7d1029c-9b7c-4bd7-b606-a1962a129461/d7d1029c-9b7c-4bd7-b606-a1962a129461.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 852.650831] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ffe3324-b006-4a1b-ba51-e63d20cab478 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.662998] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 852.662998] env[62109]: value = "task-1116607" [ 852.662998] env[62109]: _type = "Task" [ 852.662998] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.677264] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116607, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.681009] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116604, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.738368] env[62109]: DEBUG nova.network.neutron [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Successfully updated port: 9b48845a-ae70-493f-8ea7-542088d62859 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 852.864624] env[62109]: DEBUG oslo_vmware.api [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116603, 'name': PowerOnVM_Task, 'duration_secs': 0.593362} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.864916] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 852.865132] env[62109]: DEBUG nova.compute.manager [None req-f820f447-4c48-4c31-9ae4-0675d0f97cf7 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 852.865991] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d679b1f-e482-4da1-b496-ed946386d287 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.888215] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528fd7ee-def0-596e-277a-d50c19940e5f, 'name': SearchDatastore_Task, 'duration_secs': 0.025044} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.889039] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92d79800-6875-40c6-9887-b8e7ff911e9a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.904657] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "b1321874-8f97-4444-9f9c-d586d51a9e92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.927s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.912684] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 852.912684] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5208bd28-d52c-7c30-9830-595872655977" [ 852.912684] env[62109]: _type = "Task" [ 852.912684] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.924799] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5208bd28-d52c-7c30-9830-595872655977, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.994099] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116606, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.098211] env[62109]: DEBUG nova.scheduler.client.report [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 853.177168] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116607, 'name': Rename_Task, 'duration_secs': 0.313133} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.180368] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 853.180645] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116604, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619024} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.181165] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a62d305-99eb-4d81-be56-b3554346d449 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.182665] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 0f197e98-9630-4928-8707-56bbf6c1e5a1/0f197e98-9630-4928-8707-56bbf6c1e5a1.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 853.182901] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 853.183122] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d66b53b0-4dbd-42ce-8363-09c3ef50dcdd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.190520] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 853.190520] env[62109]: value = "task-1116609" [ 853.190520] env[62109]: _type = "Task" [ 853.190520] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.191788] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 853.191788] env[62109]: value = "task-1116608" [ 853.191788] env[62109]: _type = "Task" [ 853.191788] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.203739] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116608, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.206366] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116609, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.241440] env[62109]: DEBUG oslo_concurrency.lockutils [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.241731] env[62109]: DEBUG oslo_concurrency.lockutils [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.242014] env[62109]: DEBUG nova.network.neutron [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 853.283040] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5c8b11ca-0f38-4606-a1d8-50fdea85bd9d tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "59f6adc7-d491-4a86-83f7-89128511e00f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.386s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.424581] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5208bd28-d52c-7c30-9830-595872655977, 'name': SearchDatastore_Task, 'duration_secs': 0.031116} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.424940] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.425293] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk. 
{{(pid=62109) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 853.425588] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e4a1572-79b7-4013-bdea-61503bfee428 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.433886] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 853.433886] env[62109]: value = "task-1116610" [ 853.433886] env[62109]: _type = "Task" [ 853.433886] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.442916] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116610, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.495022] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116606, 'name': CreateVM_Task, 'duration_secs': 0.626551} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.495316] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 853.496118] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.496307] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.496655] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 853.497198] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-877302e6-b64b-4503-ac16-b831eb873721 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.504157] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 853.504157] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fbd352-6c72-d4e4-a33d-cc6f66921dac" [ 853.504157] env[62109]: _type = "Task" [ 853.504157] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.513357] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fbd352-6c72-d4e4-a33d-cc6f66921dac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.541173] env[62109]: DEBUG nova.compute.manager [req-6f79f959-3199-4839-b5ae-21a6475cffb3 req-04a08c88-e375-433b-b910-eec4f80660fd service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Received event network-vif-plugged-9b48845a-ae70-493f-8ea7-542088d62859 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.541401] env[62109]: DEBUG oslo_concurrency.lockutils [req-6f79f959-3199-4839-b5ae-21a6475cffb3 req-04a08c88-e375-433b-b910-eec4f80660fd service nova] Acquiring lock "7afbb35b-9865-40a7-8b37-d6a661a186a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.541613] env[62109]: DEBUG oslo_concurrency.lockutils [req-6f79f959-3199-4839-b5ae-21a6475cffb3 req-04a08c88-e375-433b-b910-eec4f80660fd service nova] Lock "7afbb35b-9865-40a7-8b37-d6a661a186a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.541782] env[62109]: DEBUG oslo_concurrency.lockutils [req-6f79f959-3199-4839-b5ae-21a6475cffb3 req-04a08c88-e375-433b-b910-eec4f80660fd service nova] Lock "7afbb35b-9865-40a7-8b37-d6a661a186a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.541953] env[62109]: DEBUG nova.compute.manager [req-6f79f959-3199-4839-b5ae-21a6475cffb3 req-04a08c88-e375-433b-b910-eec4f80660fd service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] No waiting events found dispatching network-vif-plugged-9b48845a-ae70-493f-8ea7-542088d62859 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 853.542144] env[62109]: WARNING nova.compute.manager [req-6f79f959-3199-4839-b5ae-21a6475cffb3 req-04a08c88-e375-433b-b910-eec4f80660fd service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Received unexpected event network-vif-plugged-9b48845a-ae70-493f-8ea7-542088d62859 for instance with vm_state active and task_state None. 
[ 853.576330] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "5d656f91-d35f-45e1-8892-7cdacd306960" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.576590] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5d656f91-d35f-45e1-8892-7cdacd306960" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.576779] env[62109]: DEBUG nova.compute.manager [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 853.577893] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff006aeb-8e3d-4a53-8dc2-6ff1b705c5e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.585924] env[62109]: DEBUG nova.compute.manager [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62109) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 853.588253] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 853.588519] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c07ab91-bffc-4c41-94f4-a16b0d2c9fdd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.597184] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 853.597184] env[62109]: value = "task-1116611" [ 853.597184] env[62109]: _type = "Task" [ 853.597184] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.603145] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.099s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.611141] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.687s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.611141] env[62109]: DEBUG nova.objects.instance [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Lazy-loading 'resources' on Instance uuid c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.611533] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116611, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.709191] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116609, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077481} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.713289] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 853.713701] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116608, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.714704] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfaa13d3-8f45-474c-bbf5-ca179089c524 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.743616] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 0f197e98-9630-4928-8707-56bbf6c1e5a1/0f197e98-9630-4928-8707-56bbf6c1e5a1.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 853.744455] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46c13725-f4f3-4d04-adbb-c2b763525818 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.773336] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 853.773336] env[62109]: value = "task-1116612" [ 853.773336] env[62109]: _type = "Task" [ 853.773336] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.787635] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116612, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.834231] env[62109]: WARNING nova.network.neutron [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] cfbec6c5-3421-476e-aca8-de96e0de15af already exists in list: networks containing: ['cfbec6c5-3421-476e-aca8-de96e0de15af']. ignoring it [ 853.944886] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116610, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485501} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.945379] env[62109]: INFO nova.virt.vmwareapi.ds_util [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk. 
[ 853.946246] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e01387-025d-4710-8cc6-e14d232a06da {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.972345] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 853.975236] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25b25df2-2a36-4823-93db-ad68c88a6e32 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.988975] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Acquiring lock "2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.989262] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Lock "2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.989480] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Acquiring lock "2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.989730] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Lock "2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.989954] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Lock "2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.992499] env[62109]: INFO nova.compute.manager [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b 
tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Terminating instance [ 853.996174] env[62109]: DEBUG nova.compute.manager [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 853.996174] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 853.996440] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2052d4a7-4a89-44bb-990c-3150dcedf1d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.001859] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 854.001859] env[62109]: value = "task-1116613" [ 854.001859] env[62109]: _type = "Task" [ 854.001859] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.009425] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 854.012751] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-efbb26c6-13a0-4329-9fc3-85244c6d99de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.019364] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116613, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.024827] env[62109]: DEBUG oslo_vmware.api [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Waiting for the task: (returnval){ [ 854.024827] env[62109]: value = "task-1116614" [ 854.024827] env[62109]: _type = "Task" [ 854.024827] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.025121] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fbd352-6c72-d4e4-a33d-cc6f66921dac, 'name': SearchDatastore_Task, 'duration_secs': 0.014573} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.025568] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.025899] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 854.026200] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.026361] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.026558] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 854.029777] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2bfc6354-a98b-4bde-b83f-cfd5c47a046c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.036488] env[62109]: DEBUG nova.compute.manager [req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Received event network-vif-plugged-0b15c050-09d2-478d-b46e-797a5ff6bd05 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 854.036636] env[62109]: DEBUG oslo_concurrency.lockutils [req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] Acquiring lock "0392a352-74e5-4551-9319-eebbc5e20d3b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.036869] env[62109]: DEBUG oslo_concurrency.lockutils [req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] Lock "0392a352-74e5-4551-9319-eebbc5e20d3b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.037065] env[62109]: DEBUG oslo_concurrency.lockutils 
[req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] Lock "0392a352-74e5-4551-9319-eebbc5e20d3b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.037264] env[62109]: DEBUG nova.compute.manager [req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] No waiting events found dispatching network-vif-plugged-0b15c050-09d2-478d-b46e-797a5ff6bd05 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 854.037492] env[62109]: WARNING nova.compute.manager [req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Received unexpected event network-vif-plugged-0b15c050-09d2-478d-b46e-797a5ff6bd05 for instance with vm_state building and task_state spawning. [ 854.037744] env[62109]: DEBUG nova.compute.manager [req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Received event network-changed-0b15c050-09d2-478d-b46e-797a5ff6bd05 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 854.037961] env[62109]: DEBUG nova.compute.manager [req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Refreshing instance network info cache due to event network-changed-0b15c050-09d2-478d-b46e-797a5ff6bd05. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 854.038207] env[62109]: DEBUG oslo_concurrency.lockutils [req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] Acquiring lock "refresh_cache-0392a352-74e5-4551-9319-eebbc5e20d3b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.038372] env[62109]: DEBUG oslo_concurrency.lockutils [req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] Acquired lock "refresh_cache-0392a352-74e5-4551-9319-eebbc5e20d3b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.038740] env[62109]: DEBUG nova.network.neutron [req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Refreshing network info cache for port 0b15c050-09d2-478d-b46e-797a5ff6bd05 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 854.046832] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 854.047159] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 854.048187] env[62109]: DEBUG oslo_vmware.api [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116614, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.051584] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2f4dd9b-f6af-4f56-89ac-1eeaf8990d48 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.060931] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 854.060931] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d39681-5f7f-feef-5738-25d15960b87a" [ 854.060931] env[62109]: _type = "Task" [ 854.060931] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.073033] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d39681-5f7f-feef-5738-25d15960b87a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.110333] env[62109]: DEBUG oslo_vmware.api [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116611, 'name': PowerOffVM_Task, 'duration_secs': 0.227754} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.110630] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 854.110861] env[62109]: DEBUG nova.compute.manager [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 854.111684] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6f4ea9-ec65-445d-bb6d-3c3977b852a3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.131288] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2039a4d-81b9-4302-8793-c99182c19ce9 tempest-ServerActionsV293TestJSON-1570109202 tempest-ServerActionsV293TestJSON-1570109202-project-member] Lock "a24f2349-7c1b-441d-a36e-b16dd61f6031" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.748s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.213646] env[62109]: DEBUG oslo_vmware.api [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116608, 'name': PowerOnVM_Task, 'duration_secs': 0.78165} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.213646] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 854.213646] env[62109]: INFO nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Took 9.89 seconds to spawn the instance on the hypervisor. [ 854.214408] env[62109]: DEBUG nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 854.214662] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c38f8a9-35dc-4a35-bd24-819064ca08fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.284664] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116612, 'name': ReconfigVM_Task, 'duration_secs': 0.403503} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.289709] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 0f197e98-9630-4928-8707-56bbf6c1e5a1/0f197e98-9630-4928-8707-56bbf6c1e5a1.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 854.290957] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84586b68-8300-42c8-82f6-caea69277473 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.298710] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 854.298710] env[62109]: value = "task-1116615" [ 854.298710] env[62109]: _type = "Task" [ 854.298710] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.311343] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116615, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.341267] env[62109]: DEBUG nova.network.neutron [req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Updated VIF entry in instance network info cache for port 0b15c050-09d2-478d-b46e-797a5ff6bd05. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 854.341673] env[62109]: DEBUG nova.network.neutron [req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Updating instance_info_cache with network_info: [{"id": "0b15c050-09d2-478d-b46e-797a5ff6bd05", "address": "fa:16:3e:8c:ee:2d", "network": {"id": "9b805542-3ae8-423b-9b1d-70116ea546bb", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1529116057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275238e3083540aa838de6d5cccf61eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b15c050-09", "ovs_interfaceid": "0b15c050-09d2-478d-b46e-797a5ff6bd05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.393537] env[62109]: DEBUG nova.network.neutron [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updating instance_info_cache with network_info: [{"id": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "address": "fa:16:3e:0c:f9:45", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa11a7ca2-70", "ovs_interfaceid": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9b48845a-ae70-493f-8ea7-542088d62859", "address": "fa:16:3e:65:6d:4f", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b48845a-ae", "ovs_interfaceid": "9b48845a-ae70-493f-8ea7-542088d62859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.517024] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116613, 'name': ReconfigVM_Task, 'duration_secs': 0.500919} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.517024] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 854.517024] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74c1e4b-3073-487b-b0a5-e05d965bdbc6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.553349] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72e15f07-f9ac-422e-bfa8-3a52eb9adfa3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.575032] env[62109]: DEBUG oslo_vmware.api [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116614, 'name': PowerOffVM_Task, 'duration_secs': 0.270171} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.577074] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 854.577382] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 854.577724] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 854.577724] env[62109]: value = "task-1116616" [ 854.577724] env[62109]: _type = "Task" [ 854.577724] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.581074] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dba249be-34ad-4a09-803a-c87d7f65cef6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.591316] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d39681-5f7f-feef-5738-25d15960b87a, 'name': SearchDatastore_Task, 'duration_secs': 0.012293} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.593139] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea1b7d63-d7b3-4837-9398-6f9518288da7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.603785] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116616, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.607615] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 854.607615] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b7bf6a-ea92-4d63-6439-a5db1daee9c3" [ 854.607615] env[62109]: _type = "Task" [ 854.607615] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.620731] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b7bf6a-ea92-4d63-6439-a5db1daee9c3, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.621091] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.621386] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 0392a352-74e5-4551-9319-eebbc5e20d3b/0392a352-74e5-4551-9319-eebbc5e20d3b.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 854.621715] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2358bd4-60be-4a52-ae00-82d15368eced {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.636064] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 854.636064] env[62109]: value = "task-1116618" [ 854.636064] env[62109]: _type = "Task" [ 854.636064] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.637074] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5d656f91-d35f-45e1-8892-7cdacd306960" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.060s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.642741] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbaaad1-3f02-4698-833b-6f9b67ec7bbb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.652474] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116618, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.656907] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3012c8ac-7d6c-4348-a84a-724c571ced94 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.695153] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c081adb7-d2db-4091-81e9-4b61c0a36683 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.698023] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 854.698266] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 854.698518] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Deleting the datastore file [datastore1] 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 854.698837] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4e85fd8-979e-491a-9bd9-6f69f3575362 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.709104] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0a10cb-8048-4158-aa33-327cf6b64adb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.713552] env[62109]: DEBUG oslo_vmware.api [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Waiting for the task: (returnval){ [ 854.713552] env[62109]: value = "task-1116619" [ 854.713552] env[62109]: _type = "Task" [ 854.713552] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.726177] env[62109]: DEBUG nova.compute.provider_tree [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.736034] env[62109]: DEBUG oslo_vmware.api [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116619, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.738124] env[62109]: INFO nova.compute.manager [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Took 45.73 seconds to build instance. [ 854.815289] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116615, 'name': Rename_Task, 'duration_secs': 0.163323} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.815636] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 854.816441] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d09207cc-e5b3-432f-b693-055aee2ffa1f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.827667] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 854.827667] env[62109]: value = "task-1116620" [ 854.827667] env[62109]: _type = "Task" [ 854.827667] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.849329] env[62109]: DEBUG oslo_concurrency.lockutils [req-1f408b6e-a231-4cfa-9027-43b184f35dec req-402c35fb-5758-4c51-9548-f4475ae9aaeb service nova] Releasing lock "refresh_cache-0392a352-74e5-4551-9319-eebbc5e20d3b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.850299] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116620, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.899839] env[62109]: DEBUG oslo_concurrency.lockutils [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.899839] env[62109]: DEBUG oslo_concurrency.lockutils [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.900100] env[62109]: DEBUG oslo_concurrency.lockutils [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.900904] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9f4f1e-5458-4ffc-b4c7-5a5f73b0b394 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.929644] env[62109]: DEBUG nova.virt.hardware [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 854.929883] env[62109]: DEBUG nova.virt.hardware [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 854.930052] env[62109]: DEBUG nova.virt.hardware [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 854.930287] env[62109]: DEBUG nova.virt.hardware [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 854.930408] env[62109]: DEBUG nova.virt.hardware [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 854.930557] env[62109]: DEBUG nova.virt.hardware [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 854.930778] env[62109]: DEBUG nova.virt.hardware [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 854.930967] env[62109]: DEBUG nova.virt.hardware [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 854.931148] env[62109]: DEBUG nova.virt.hardware [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 854.931323] env[62109]: DEBUG nova.virt.hardware [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 854.931506] env[62109]: DEBUG nova.virt.hardware [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 854.938044] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Reconfiguring VM to attach interface {{(pid=62109) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 854.938472] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03fc1206-a41c-4a9b-8e0c-8b56c825e6ab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.959872] env[62109]: DEBUG oslo_vmware.api [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 854.959872] env[62109]: value = "task-1116621" [ 854.959872] env[62109]: _type = "Task" [ 854.959872] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.973423] env[62109]: DEBUG oslo_vmware.api [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116621, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.096287] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116616, 'name': ReconfigVM_Task, 'duration_secs': 0.242423} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.096831] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 855.096998] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29b09bad-f2db-4c45-b9e5-f6029a16014d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.105483] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 855.105483] env[62109]: value = "task-1116622" [ 855.105483] env[62109]: _type = "Task" [ 855.105483] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.115679] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116622, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.154265] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116618, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483504} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.155086] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.155528] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 0392a352-74e5-4551-9319-eebbc5e20d3b/0392a352-74e5-4551-9319-eebbc5e20d3b.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 855.155933] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 855.156303] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f8a44c2-b528-48bd-a502-24f8ec19a1f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.166723] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 855.166723] env[62109]: value = "task-1116623" [ 855.166723] env[62109]: _type = "Task" [ 855.166723] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.179491] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116623, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.223813] env[62109]: DEBUG oslo_vmware.api [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Task: {'id': task-1116619, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285455} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.224178] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.224220] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 855.224380] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 855.224560] env[62109]: INFO nova.compute.manager [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Took 1.23 seconds to destroy the instance on the hypervisor. [ 855.224814] env[62109]: DEBUG oslo.service.loopingcall [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.225073] env[62109]: DEBUG nova.compute.manager [-] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 855.225192] env[62109]: DEBUG nova.network.neutron [-] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 855.236365] env[62109]: DEBUG nova.scheduler.client.report [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 855.240668] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98171823-653a-4a13-9253-695d0cfe6a84 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "d7d1029c-9b7c-4bd7-b606-a1962a129461" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.236s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.337440] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116620, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.472185] env[62109]: DEBUG oslo_vmware.api [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116621, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.617798] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52244679-0605-73ee-cbee-2bb5846c3faf/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 855.620394] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2b06a9-5e10-464c-b91d-c2f9aebfd835 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.635251] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116622, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.636985] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52244679-0605-73ee-cbee-2bb5846c3faf/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 855.637168] env[62109]: ERROR oslo_vmware.rw_handles [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52244679-0605-73ee-cbee-2bb5846c3faf/disk-0.vmdk due to incomplete transfer. [ 855.637439] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3902d982-31fb-46b7-9090-cd5727deedbf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.646280] env[62109]: DEBUG oslo_vmware.rw_handles [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52244679-0605-73ee-cbee-2bb5846c3faf/disk-0.vmdk. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 855.646495] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Uploaded image 86f9f547-4757-466b-b9a1-a5222ef70c82 to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 855.648988] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 855.649265] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-92ec5d1b-025d-4235-a0d1-f1833d3d75b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.657871] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 855.657871] env[62109]: value = "task-1116624" [ 855.657871] env[62109]: _type = "Task" [ 855.657871] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.669292] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116624, 'name': Destroy_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.680035] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116623, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086704} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.680405] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 855.681368] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3115faf6-99ba-4368-b364-ce82a5243658 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.709092] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 0392a352-74e5-4551-9319-eebbc5e20d3b/0392a352-74e5-4551-9319-eebbc5e20d3b.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 855.709446] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4253182d-b650-4568-97a0-d5b034608620 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.732360] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 855.732360] env[62109]: value = "task-1116625" [ 855.732360] env[62109]: _type = "Task" [ 855.732360] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.744882] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.135s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.747126] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116625, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.747655] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.696s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.747863] env[62109]: DEBUG nova.objects.instance [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: c694c178-3894-4997-8e99-8f4900a64848] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 855.772071] env[62109]: INFO nova.scheduler.client.report [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Deleted allocations for instance c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4 [ 855.837655] env[62109]: DEBUG oslo_vmware.api [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116620, 'name': PowerOnVM_Task, 'duration_secs': 0.932456} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.838027] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 855.838420] env[62109]: INFO nova.compute.manager [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Took 8.81 seconds to spawn the instance on the hypervisor. [ 855.838595] env[62109]: DEBUG nova.compute.manager [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 855.839637] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99dbc00b-c0ea-4589-adfc-17709350ac45 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.972106] env[62109]: DEBUG oslo_vmware.api [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116621, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.121557] env[62109]: DEBUG oslo_vmware.api [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116622, 'name': PowerOnVM_Task, 'duration_secs': 0.885256} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.121838] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 856.124681] env[62109]: DEBUG nova.compute.manager [None req-fdf4e404-24ef-4528-abc8-336a83db4fe0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 856.125511] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19185cde-1b59-47e7-86d9-5b5f76e92912 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.168509] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116624, 'name': Destroy_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.244650] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.282239] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6aff44a8-04a0-4666-852f-cf0c09927b07 tempest-ServersTestManualDisk-1821220686 tempest-ServersTestManualDisk-1821220686-project-member] Lock "c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.040s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.359215] env[62109]: INFO nova.compute.manager [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Took 41.07 seconds to build instance. [ 856.459455] env[62109]: DEBUG nova.network.neutron [-] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.473965] env[62109]: DEBUG oslo_vmware.api [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116621, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.670310] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116624, 'name': Destroy_Task, 'duration_secs': 0.94591} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.670656] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Destroyed the VM [ 856.670927] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 856.671264] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-03c1e788-3415-4fa9-8d37-9fb19b900145 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.679560] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 856.679560] env[62109]: value = "task-1116626" [ 856.679560] env[62109]: _type = "Task" [ 856.679560] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.691298] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116626, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.743157] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116625, 'name': ReconfigVM_Task, 'duration_secs': 0.700187} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.743560] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 0392a352-74e5-4551-9319-eebbc5e20d3b/0392a352-74e5-4551-9319-eebbc5e20d3b.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.744313] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24f53bdc-43ae-45d8-8615-fe37fb88a3c8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.752450] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 856.752450] env[62109]: value = "task-1116627" [ 856.752450] env[62109]: _type = "Task" [ 856.752450] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.757033] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7434f3e4-f4f0-4825-9038-54127c6a260e tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.758270] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.648s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.759832] env[62109]: INFO nova.compute.claims [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 856.766363] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116627, 'name': Rename_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.860786] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12eb3ce1-a3f9-4233-82ad-f80e44202272 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "0f197e98-9630-4928-8707-56bbf6c1e5a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.941s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.870180] env[62109]: DEBUG nova.compute.manager [req-c4f93206-61ed-4ee6-ac4e-5259e24eff49 req-b9d72e59-d990-4e0d-a10b-995d96c129a2 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Received event network-changed-9b48845a-ae70-493f-8ea7-542088d62859 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 856.870902] env[62109]: DEBUG nova.compute.manager [req-c4f93206-61ed-4ee6-ac4e-5259e24eff49 req-b9d72e59-d990-4e0d-a10b-995d96c129a2 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Refreshing instance network info cache due to event network-changed-9b48845a-ae70-493f-8ea7-542088d62859. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 856.870902] env[62109]: DEBUG oslo_concurrency.lockutils [req-c4f93206-61ed-4ee6-ac4e-5259e24eff49 req-b9d72e59-d990-4e0d-a10b-995d96c129a2 service nova] Acquiring lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.871067] env[62109]: DEBUG oslo_concurrency.lockutils [req-c4f93206-61ed-4ee6-ac4e-5259e24eff49 req-b9d72e59-d990-4e0d-a10b-995d96c129a2 service nova] Acquired lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.871113] env[62109]: DEBUG nova.network.neutron [req-c4f93206-61ed-4ee6-ac4e-5259e24eff49 req-b9d72e59-d990-4e0d-a10b-995d96c129a2 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Refreshing network info cache for port 9b48845a-ae70-493f-8ea7-542088d62859 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 856.964759] env[62109]: INFO nova.compute.manager [-] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Took 1.74 seconds to deallocate network for instance. [ 856.981638] env[62109]: DEBUG oslo_vmware.api [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116621, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.064140] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.064457] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.194465] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116626, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.275831] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116627, 'name': Rename_Task, 'duration_secs': 0.205452} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.275831] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 857.275831] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3044053c-9a5a-4e07-97a0-d2d29e5b06e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.284172] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 857.284172] env[62109]: value = "task-1116628" [ 857.284172] env[62109]: _type = "Task" [ 857.284172] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.297039] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116628, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.478664] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.479304] env[62109]: DEBUG oslo_vmware.api [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116621, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.568063] env[62109]: DEBUG nova.compute.manager [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 857.694021] env[62109]: DEBUG oslo_vmware.api [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116626, 'name': RemoveSnapshot_Task, 'duration_secs': 0.938948} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.694021] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 857.694021] env[62109]: INFO nova.compute.manager [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Took 16.73 seconds to snapshot the instance on the hypervisor. [ 857.698913] env[62109]: DEBUG nova.network.neutron [req-c4f93206-61ed-4ee6-ac4e-5259e24eff49 req-b9d72e59-d990-4e0d-a10b-995d96c129a2 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updated VIF entry in instance network info cache for port 9b48845a-ae70-493f-8ea7-542088d62859. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 857.699534] env[62109]: DEBUG nova.network.neutron [req-c4f93206-61ed-4ee6-ac4e-5259e24eff49 req-b9d72e59-d990-4e0d-a10b-995d96c129a2 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updating instance_info_cache with network_info: [{"id": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "address": "fa:16:3e:0c:f9:45", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa11a7ca2-70", "ovs_interfaceid": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9b48845a-ae70-493f-8ea7-542088d62859", "address": "fa:16:3e:65:6d:4f", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b48845a-ae", "ovs_interfaceid": "9b48845a-ae70-493f-8ea7-542088d62859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.799069] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116628, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.976503] env[62109]: DEBUG oslo_vmware.api [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116621, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.093238] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.168289] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56eefbdd-df36-4b40-bb9e-00283f07f832 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.179219] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b102a166-fea3-4389-a28d-c324e93579d1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.215642] env[62109]: DEBUG oslo_concurrency.lockutils [req-c4f93206-61ed-4ee6-ac4e-5259e24eff49 req-b9d72e59-d990-4e0d-a10b-995d96c129a2 service nova] Releasing lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.219211] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7b0f26-1dd4-476f-b548-0f4ae9692259 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.228495] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8430c67-c95e-4291-806b-f759f595c98f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.247209] env[62109]: DEBUG nova.compute.provider_tree [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.277240] env[62109]: DEBUG nova.compute.manager [None req-b9e809d5-9dc9-4aba-b24b-675f401a4a74 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Found 2 images (rotation: 2) {{(pid=62109) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 858.297760] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116628, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.477767] env[62109]: DEBUG oslo_vmware.api [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116621, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.751154] env[62109]: DEBUG nova.scheduler.client.report [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 858.797305] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116628, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.944264] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "342b7069-22fb-4934-9ec3-8ecbc987696e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.944614] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.965576] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "5d656f91-d35f-45e1-8892-7cdacd306960" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.965809] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5d656f91-d35f-45e1-8892-7cdacd306960" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.965982] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "5d656f91-d35f-45e1-8892-7cdacd306960-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.966299] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5d656f91-d35f-45e1-8892-7cdacd306960-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.966418] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5d656f91-d35f-45e1-8892-7cdacd306960-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.968521] env[62109]: INFO nova.compute.manager [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Terminating instance [ 858.974042] env[62109]: DEBUG nova.compute.manager [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 858.974283] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 858.975311] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386c66a7-4247-494a-aa14-b9e345e51930 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.984068] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 858.988243] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f1b2c36-9dfa-4db4-9f02-f291c1d9d17b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.990766] env[62109]: DEBUG oslo_vmware.api [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116621, 'name': ReconfigVM_Task, 'duration_secs': 3.987949} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.990766] env[62109]: DEBUG oslo_concurrency.lockutils [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.990766] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Reconfigured VM to attach interface {{(pid=62109) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 859.097376] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 859.097645] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 859.097853] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleting the datastore file [datastore2] 5d656f91-d35f-45e1-8892-7cdacd306960 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 859.098390] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f414e44-a2c1-414f-9d6e-1eb0b6b7ce43 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.110449] env[62109]: DEBUG oslo_vmware.api [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 859.110449] env[62109]: value = "task-1116630" [ 859.110449] env[62109]: _type = "Task" [ 859.110449] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.119643] env[62109]: DEBUG oslo_vmware.api [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116630, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.257351] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.499s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.257969] env[62109]: DEBUG nova.compute.manager [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 859.260823] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.858s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.262461] env[62109]: INFO nova.compute.claims [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.299197] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116628, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.451691] env[62109]: DEBUG nova.compute.utils [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 859.497953] env[62109]: DEBUG oslo_concurrency.lockutils [None req-03455c73-a44e-4a25-b993-d4dd45465607 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-7afbb35b-9865-40a7-8b37-d6a661a186a9-None" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 10.427s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.622526] env[62109]: DEBUG oslo_vmware.api [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116630, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.307337} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.622884] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 859.623843] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 859.624182] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 859.624476] env[62109]: INFO nova.compute.manager [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Took 0.65 seconds to destroy the instance on the hypervisor. [ 859.624835] env[62109]: DEBUG oslo.service.loopingcall [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 859.625140] env[62109]: DEBUG nova.compute.manager [-] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 859.625277] env[62109]: DEBUG nova.network.neutron [-] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 859.766897] env[62109]: DEBUG nova.compute.utils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 859.771019] env[62109]: DEBUG nova.compute.manager [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Not allocating networking since 'none' was specified. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 859.797975] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116628, 'name': PowerOnVM_Task} progress is 81%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.954483] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.010s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.271854] env[62109]: DEBUG nova.compute.manager [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 860.298284] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116628, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.541636] env[62109]: DEBUG nova.network.neutron [-] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.726288] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089527e5-4aba-4974-877e-b928fb9e76b6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.734141] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714f9cf5-8d9b-427b-9595-fdbb51ebedcd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.766986] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "b1321874-8f97-4444-9f9c-d586d51a9e92" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.767270] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "b1321874-8f97-4444-9f9c-d586d51a9e92" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.767497] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "b1321874-8f97-4444-9f9c-d586d51a9e92-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.767692] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 
tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "b1321874-8f97-4444-9f9c-d586d51a9e92-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.767865] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "b1321874-8f97-4444-9f9c-d586d51a9e92-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.770323] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18394a9b-3567-46de-8fa6-3466e6840eed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.773471] env[62109]: INFO nova.compute.manager [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Terminating instance [ 860.775603] env[62109]: DEBUG nova.compute.manager [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 860.775804] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 860.776539] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f06a406-1ea1-4663-a0b2-3428c50dd8bc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.785555] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d3be42-4a3c-48de-86ef-362077ed5667 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.793077] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 860.796367] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-344c2d0c-267c-44cf-953a-298745d42b7c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.799870] env[62109]: DEBUG nova.compute.manager [req-1fcb9933-101c-4bd4-956a-36902ff4ad4b req-cd007ca1-f823-44f7-a52e-d3f2ce9653bc service nova] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Received event network-vif-deleted-f5f52514-4146-44d3-9e0e-5ee87f782604 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 860.808981] env[62109]: DEBUG nova.compute.provider_tree [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.814559] env[62109]: DEBUG oslo_vmware.api [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116628, 'name': PowerOnVM_Task, 'duration_secs': 3.344902} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.816763] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 860.816988] env[62109]: INFO nova.compute.manager [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Took 11.05 seconds to spawn the instance on the hypervisor. [ 860.817213] env[62109]: DEBUG nova.compute.manager [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 860.818284] env[62109]: DEBUG oslo_vmware.api [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 860.818284] env[62109]: value = "task-1116631" [ 860.818284] env[62109]: _type = "Task" [ 860.818284] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.819061] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6171524-4175-40f9-98a8-a3d57a110452 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.834923] env[62109]: DEBUG oslo_vmware.api [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116631, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.034790] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "342b7069-22fb-4934-9ec3-8ecbc987696e" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.035068] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.035301] env[62109]: INFO nova.compute.manager [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Attaching volume 54e65dec-9bc6-4ad5-85d6-378dd519a464 to /dev/sdb [ 861.046316] env[62109]: INFO nova.compute.manager [-] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Took 1.42 seconds to deallocate network for instance. [ 861.081268] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3973cc4-3a42-4fb7-84d7-9407de704b37 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.086985] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95e3848-7610-4a91-8ed6-a24fa1e3b9a6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.105428] env[62109]: DEBUG nova.virt.block_device [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Updating existing volume attachment record: 60e68d92-eaf6-4531-84b6-da3da4b9da71 {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 861.293470] env[62109]: DEBUG nova.compute.manager [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 861.312443] env[62109]: DEBUG nova.scheduler.client.report [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 861.341314] env[62109]: DEBUG oslo_vmware.api [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116631, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.347086] env[62109]: DEBUG nova.virt.hardware [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 861.347498] env[62109]: DEBUG nova.virt.hardware [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 861.347888] env[62109]: DEBUG nova.virt.hardware [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 861.348113] env[62109]: DEBUG nova.virt.hardware [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 861.348429] env[62109]: DEBUG nova.virt.hardware [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 861.348771] env[62109]: DEBUG nova.virt.hardware [None 
req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 861.349286] env[62109]: DEBUG nova.virt.hardware [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 861.349431] env[62109]: DEBUG nova.virt.hardware [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 861.349717] env[62109]: DEBUG nova.virt.hardware [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 861.350073] env[62109]: DEBUG nova.virt.hardware [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 861.350422] env[62109]: DEBUG nova.virt.hardware [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 861.352611] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2c22ea-1655-4fc6-a712-5cd8ae2ed3b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.365305] env[62109]: INFO nova.compute.manager [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Took 41.08 seconds to build instance. [ 861.373765] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b396bf-4cbb-4a8b-a876-a65e16360a75 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.391449] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 861.398820] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Creating folder: Project (1101d6b1947f4f19860e0dc6e98b2c33). Parent ref: group-v244329. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 861.399945] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e99aed79-6c20-48c2-8c38-a428cdecb4f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.415400] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Created folder: Project (1101d6b1947f4f19860e0dc6e98b2c33) in parent group-v244329. [ 861.415670] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Creating folder: Instances. Parent ref: group-v244464. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 861.415954] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d23bd26-a691-4c13-8a57-1d850b6a8a47 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.431631] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Created folder: Instances in parent group-v244464. [ 861.431929] env[62109]: DEBUG oslo.service.loopingcall [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 861.432200] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 861.432566] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd57dcb4-90fa-4f15-9d9c-2adc59ac7211 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.454942] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 861.454942] env[62109]: value = "task-1116637" [ 861.454942] env[62109]: _type = "Task" [ 861.454942] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.468338] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116637, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.557946] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.691889] env[62109]: DEBUG oslo_concurrency.lockutils [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "d7d1029c-9b7c-4bd7-b606-a1962a129461" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.692476] env[62109]: DEBUG oslo_concurrency.lockutils [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "d7d1029c-9b7c-4bd7-b606-a1962a129461" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.692888] env[62109]: DEBUG oslo_concurrency.lockutils [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "d7d1029c-9b7c-4bd7-b606-a1962a129461-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.693260] env[62109]: DEBUG oslo_concurrency.lockutils [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "d7d1029c-9b7c-4bd7-b606-a1962a129461-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.693573] env[62109]: DEBUG oslo_concurrency.lockutils [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "d7d1029c-9b7c-4bd7-b606-a1962a129461-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.697325] env[62109]: INFO nova.compute.manager [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Terminating instance [ 861.701406] env[62109]: DEBUG nova.compute.manager [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 861.701718] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 861.703212] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30ede30-c8e3-4a4c-944b-4e771d9e6e48 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.716244] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 861.716244] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1bb4f50b-f970-42df-a9aa-dbae8f3f39de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.723714] env[62109]: DEBUG oslo_vmware.api [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 861.723714] env[62109]: value = "task-1116638" [ 861.723714] env[62109]: _type = "Task" [ 861.723714] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.734026] env[62109]: DEBUG oslo_vmware.api [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116638, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.820502] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.821161] env[62109]: DEBUG nova.compute.manager [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 861.831215] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.006s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.832887] env[62109]: INFO nova.compute.claims [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 861.851982] env[62109]: DEBUG oslo_vmware.api [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116631, 'name': PowerOffVM_Task, 'duration_secs': 0.581214} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.852455] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 861.852657] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 861.852938] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-260049f3-8efd-45fd-9cec-5ab2d7e37b3c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.868072] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fea8e16f-a2bf-4114-8033-12593eb6baf4 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "0392a352-74e5-4551-9319-eebbc5e20d3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 49.199s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.928418] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 861.928753] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 861.929024] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 
tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Deleting the datastore file [datastore1] b1321874-8f97-4444-9f9c-d586d51a9e92 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 861.929666] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9d06d3d-3963-48e2-afd9-1774f9adc041 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.940235] env[62109]: DEBUG oslo_vmware.api [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 861.940235] env[62109]: value = "task-1116640" [ 861.940235] env[62109]: _type = "Task" [ 861.940235] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.951111] env[62109]: DEBUG oslo_vmware.api [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116640, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.966339] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116637, 'name': CreateVM_Task, 'duration_secs': 0.359129} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.966576] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 861.967014] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.967228] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.967636] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 861.967930] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6a6d396-c26e-4aef-b302-cc261355ee36 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.973627] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 861.973627] 
env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52713d6b-2826-e1b5-771f-79f4863b08f5" [ 861.973627] env[62109]: _type = "Task" [ 861.973627] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.983197] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52713d6b-2826-e1b5-771f-79f4863b08f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.165829] env[62109]: DEBUG nova.compute.manager [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 862.166752] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e63ead2-1590-4bf2-8f0d-969144488ab0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.233948] env[62109]: DEBUG oslo_vmware.api [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116638, 'name': PowerOffVM_Task, 'duration_secs': 0.255941} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.234267] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 862.234477] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 862.234693] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d703daf2-dde4-4a24-ab1f-ebfc7198d2b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.308368] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 862.308368] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 862.308368] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Deleting the datastore file [datastore2] d7d1029c-9b7c-4bd7-b606-a1962a129461 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.308791] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9bc34a3-d777-4fa2-b448-ac380d6910dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.316165] env[62109]: DEBUG oslo_vmware.api [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for the task: (returnval){ [ 862.316165] env[62109]: value = "task-1116642" [ 862.316165] env[62109]: _type = "Task" [ 862.316165] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.326217] env[62109]: DEBUG oslo_vmware.api [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116642, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.339036] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "5c7dbe04-5027-49cd-a478-79046fee1f16" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.339036] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "5c7dbe04-5027-49cd-a478-79046fee1f16" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.345235] env[62109]: DEBUG nova.compute.utils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 862.350016] env[62109]: DEBUG nova.compute.manager [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 862.350016] env[62109]: DEBUG nova.network.neutron [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 862.410638] env[62109]: DEBUG nova.policy [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e608055854844801b9f7c51d07820917', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ca12aa68e4b4d4d8cf1e3332deb44f4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 862.458848] env[62109]: DEBUG oslo_vmware.api [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116640, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35764} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.459021] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.459199] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 862.460027] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 862.460027] env[62109]: INFO nova.compute.manager [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Took 1.68 seconds to destroy the instance on the hypervisor. [ 862.460027] env[62109]: DEBUG oslo.service.loopingcall [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.460027] env[62109]: DEBUG nova.compute.manager [-] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 862.460027] env[62109]: DEBUG nova.network.neutron [-] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 862.486462] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52713d6b-2826-e1b5-771f-79f4863b08f5, 'name': SearchDatastore_Task, 'duration_secs': 0.011373} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.486784] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.487137] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 862.487323] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.487570] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.487891] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 862.488102] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a763d5d-8d08-4418-9c9a-b3ef09e04b0c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.501867] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 862.502117] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 862.502928] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b7995d7-b330-45da-9f6e-a40cb1b4c8d4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.509028] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 862.509028] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e98263-c2ef-147f-0dfa-05360acf95d5" [ 862.509028] env[62109]: _type = "Task" [ 862.509028] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.517213] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e98263-c2ef-147f-0dfa-05360acf95d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.591292] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "interface-7afbb35b-9865-40a7-8b37-d6a661a186a9-a54ea2c9-4872-4e92-893b-ad7c797f25ac" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.591636] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-7afbb35b-9865-40a7-8b37-d6a661a186a9-a54ea2c9-4872-4e92-893b-ad7c797f25ac" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.591985] env[62109]: DEBUG nova.objects.instance [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lazy-loading 'flavor' on Instance uuid 7afbb35b-9865-40a7-8b37-d6a661a186a9 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 862.677995] env[62109]: INFO nova.compute.manager [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] instance snapshotting [ 862.678644] env[62109]: DEBUG nova.objects.instance [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'flavor' on Instance uuid 8b63f9a1-5639-48b2-b0a9-30380835bef2 
{{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 862.793350] env[62109]: DEBUG nova.network.neutron [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Successfully created port: 70216814-67e6-4c4a-80a6-94f8cf8dd246 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 862.810147] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "0f197e98-9630-4928-8707-56bbf6c1e5a1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.810523] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "0f197e98-9630-4928-8707-56bbf6c1e5a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.810789] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "0f197e98-9630-4928-8707-56bbf6c1e5a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.811029] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "0f197e98-9630-4928-8707-56bbf6c1e5a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.811260] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "0f197e98-9630-4928-8707-56bbf6c1e5a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.814226] env[62109]: INFO nova.compute.manager [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Terminating instance [ 862.816142] env[62109]: DEBUG nova.compute.manager [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 862.816344] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 862.817235] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9747514d-88b2-4743-bd61-185a5b21f845 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.829453] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 862.832238] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38a91388-4ceb-4166-9c00-6d7d6abb50d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.833776] env[62109]: DEBUG oslo_vmware.api [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Task: {'id': task-1116642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.305046} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.834032] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.834250] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 862.834487] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 862.834720] env[62109]: INFO nova.compute.manager [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Took 1.13 seconds to destroy the instance on the hypervisor. [ 862.834981] env[62109]: DEBUG oslo.service.loopingcall [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.835577] env[62109]: DEBUG nova.compute.manager [-] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 862.835704] env[62109]: DEBUG nova.network.neutron [-] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 862.841494] env[62109]: DEBUG nova.compute.manager [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 862.844204] env[62109]: DEBUG oslo_vmware.api [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 862.844204] env[62109]: value = "task-1116643" [ 862.844204] env[62109]: _type = "Task" [ 862.844204] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.854452] env[62109]: DEBUG nova.compute.manager [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 862.861495] env[62109]: DEBUG oslo_vmware.api [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116643, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.019164] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e98263-c2ef-147f-0dfa-05360acf95d5, 'name': SearchDatastore_Task, 'duration_secs': 0.022678} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.020181] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09ea880e-c293-493f-947c-d6c8080b52d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.031023] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 863.031023] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fa8797-5fcf-73c7-1780-cd704fb15062" [ 863.031023] env[62109]: _type = "Task" [ 863.031023] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.037153] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fa8797-5fcf-73c7-1780-cd704fb15062, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.185583] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f79903-c8bb-4eab-bb31-016fa71f373b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.207093] env[62109]: DEBUG nova.network.neutron [-] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.211923] env[62109]: DEBUG nova.objects.instance [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lazy-loading 'pci_requests' on Instance uuid 7afbb35b-9865-40a7-8b37-d6a661a186a9 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.213499] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da6c138-9eeb-4995-8b72-dc8af75389b0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.334866] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2502a0cb-7546-46a4-bcdf-647a974c4e1d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.343028] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4f8887-8fea-483f-bcc9-3a366fd6c782 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.356614] env[62109]: DEBUG oslo_vmware.api [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116643, 'name': PowerOffVM_Task, 'duration_secs': 0.188314} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.387233] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 863.387455] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 863.388749] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5defe451-d5ea-4d69-b896-21a1479bb0e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.390968] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f236839-254b-4382-87dd-d77606840e02 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.401979] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.404388] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce009623-1ee9-490a-8ed1-32a16f02593b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.420523] env[62109]: DEBUG nova.compute.provider_tree [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.465387] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 863.465765] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 863.465965] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Deleting the datastore file [datastore2] 0f197e98-9630-4928-8707-56bbf6c1e5a1 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 863.466264] env[62109]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8878db08-f27b-418f-a70a-561cacd8eb03 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.197762] env[62109]: DEBUG nova.compute.manager [req-bf5f730b-17f3-4162-8330-583d3cfa97cb req-23bac069-8b1b-4668-947b-300ad44c8177 service nova] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Received event network-vif-deleted-70961368-3e7f-4c05-b619-fdb2dc044a77 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 864.201254] env[62109]: DEBUG nova.network.neutron [-] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.202341] env[62109]: INFO nova.compute.manager [-] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Took 1.74 seconds to deallocate network for instance. [ 864.202759] env[62109]: DEBUG nova.objects.base [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Object Instance<7afbb35b-9865-40a7-8b37-d6a661a186a9> lazy-loaded attributes: flavor,pci_requests {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 864.202975] env[62109]: DEBUG nova.network.neutron [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 864.209021] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 864.209021] env[62109]: DEBUG nova.compute.manager [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 864.209021] env[62109]: DEBUG nova.scheduler.client.report [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 864.213293] env[62109]: INFO nova.compute.manager [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Rescuing [ 864.213293] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "refresh_cache-3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.213293] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "refresh_cache-3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.213293] env[62109]: DEBUG nova.network.neutron [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 864.216478] env[62109]: DEBUG oslo_vmware.api [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 864.216478] env[62109]: value = "task-1116646" [ 864.216478] env[62109]: _type = "Task" [ 864.216478] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.217476] env[62109]: DEBUG nova.compute.manager [req-87db8e29-b8f8-4fd0-ad09-f30c2ddde7b5 req-eb947e8e-bedd-4d77-9527-aac7a0a3da06 service nova] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Received event network-vif-deleted-56c50c1b-b3f7-4097-b080-6b487489343b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 864.219141] env[62109]: INFO nova.compute.manager [-] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Took 1.38 seconds to deallocate network for instance. 
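The SearchDatastore_Task, PowerOffVM_Task and CopyVirtualDisk_Task entries above all follow oslo.vmware's invoke-then-poll pattern: the driver submits a vSphere task, and wait_for_task/_poll_task report its progress until it completes. As a rough illustration only (the host, credentials and managed-object reference below are placeholders, not values from this log), the pattern looks like:

# Sketch of the oslo.vmware task-polling pattern seen in the log above.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.test',          # placeholder vCenter host
    'administrator@vsphere.local',   # placeholder user
    'secret',                        # placeholder password
    api_retry_count=10,              # retry/poll settings are assumptions
    task_poll_interval=0.5)

# Build a managed-object reference and submit a task, then block on it,
# mirroring the PowerOffVM_Task / wait_for_task lines in this section.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder moref
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)  # polls until the task succeeds or raises

wait_for_task emits the "Waiting for the task ... to complete" line when the task is submitted and the "progress is N%" / "completed successfully" lines while polling, which is why each task id in this section appears more than once.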
[ 864.224352] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c93f505f-4d09-48da-b7ee-30030dcc7572 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.250632] env[62109]: DEBUG oslo_vmware.api [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156946} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.251230] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fa8797-5fcf-73c7-1780-cd704fb15062, 'name': SearchDatastore_Task, 'duration_secs': 0.012634} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.253133] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 864.253358] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 864.253546] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 864.253856] env[62109]: INFO nova.compute.manager [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Took 1.44 seconds to destroy the instance on the hypervisor. [ 864.254062] env[62109]: DEBUG oslo.service.loopingcall [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 864.254668] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.255018] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 5bea4229-6182-445e-b569-e7413ce92b93/5bea4229-6182-445e-b569-e7413ce92b93.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 864.256381] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 864.256381] env[62109]: value = "task-1116647" [ 864.256381] env[62109]: _type = "Task" [ 864.256381] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.258263] env[62109]: DEBUG nova.virt.hardware [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 864.258494] env[62109]: DEBUG nova.virt.hardware [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 864.258693] env[62109]: DEBUG nova.virt.hardware [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.258886] env[62109]: DEBUG nova.virt.hardware [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 864.259044] 
env[62109]: DEBUG nova.virt.hardware [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.259193] env[62109]: DEBUG nova.virt.hardware [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 864.259397] env[62109]: DEBUG nova.virt.hardware [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 864.259552] env[62109]: DEBUG nova.virt.hardware [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 864.259712] env[62109]: DEBUG nova.virt.hardware [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 864.259896] env[62109]: DEBUG nova.virt.hardware [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 864.260098] env[62109]: DEBUG nova.virt.hardware [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 864.260376] env[62109]: DEBUG nova.compute.manager [-] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 864.261191] env[62109]: DEBUG nova.network.neutron [-] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 864.262462] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1993246-7027-4b9a-b528-bff1f85dd411 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.266770] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4ca56a-dab1-49c8-a6af-80f790825c4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.284342] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 
tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116647, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.285128] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 864.285128] env[62109]: value = "task-1116648" [ 864.285128] env[62109]: _type = "Task" [ 864.285128] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.286403] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a593c892-4c68-4dad-92b2-a308ad26ff87 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.308501] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116648, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.355023] env[62109]: DEBUG nova.policy [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491fd4e791924dafb155dd356bf20aa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6ee24c114bd495e8f29eeda1f6b8bba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 864.641088] env[62109]: DEBUG nova.network.neutron [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Successfully updated port: 70216814-67e6-4c4a-80a6-94f8cf8dd246 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 864.720316] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.893s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.720847] env[62109]: DEBUG nova.compute.manager [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 864.723593] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.821s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.723934] env[62109]: DEBUG nova.objects.instance [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lazy-loading 'resources' on Instance uuid c694c178-3894-4997-8e99-8f4900a64848 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 864.740024] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.745574] env[62109]: DEBUG oslo_concurrency.lockutils [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.778514] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116647, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.798276] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116648, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507825} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.798572] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 5bea4229-6182-445e-b569-e7413ce92b93/5bea4229-6182-445e-b569-e7413ce92b93.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 864.798788] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 864.799085] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eea7aa3d-4670-4d12-8af9-00895c300cef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.810994] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 864.810994] env[62109]: value = "task-1116649" [ 864.810994] env[62109]: _type = "Task" [ 864.810994] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.820340] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116649, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.049956] env[62109]: DEBUG nova.network.neutron [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Updating instance_info_cache with network_info: [{"id": "4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6", "address": "fa:16:3e:f8:8b:04", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c8bdd11-0b", "ovs_interfaceid": "4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.147231] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.147396] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.147607] env[62109]: DEBUG nova.network.neutron [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 865.227888] env[62109]: DEBUG nova.compute.utils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 865.232617] env[62109]: DEBUG nova.compute.manager [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Not allocating networking since 'none' was specified. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 865.278866] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116647, 'name': CreateSnapshot_Task, 'duration_secs': 0.564942} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.279163] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 865.279900] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85da091-1ed3-42dc-8256-4a69f9bbb367 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.290739] env[62109]: DEBUG nova.compute.manager [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 865.296618] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2858085-5212-44cf-84c6-ef0740cefebd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.323907] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116649, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.177807} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.324199] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.324970] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ee2cfb-ffe1-41dd-b9a8-9f573d0df093 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.347507] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 5bea4229-6182-445e-b569-e7413ce92b93/5bea4229-6182-445e-b569-e7413ce92b93.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.351025] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd33a1ac-1580-41e9-973c-8d00e2c59053 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.372137] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 865.372137] env[62109]: value = "task-1116650" [ 865.372137] env[62109]: _type = "Task" [ 865.372137] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.383967] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116650, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.411932] env[62109]: DEBUG nova.network.neutron [-] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.552508] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "refresh_cache-3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.653017] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16f736c-a163-4ad4-b1ce-41c7932766ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.656747] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Volume attach. 
Driver type: vmdk {{(pid=62109) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 865.657224] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244463', 'volume_id': '54e65dec-9bc6-4ad5-85d6-378dd519a464', 'name': 'volume-54e65dec-9bc6-4ad5-85d6-378dd519a464', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '342b7069-22fb-4934-9ec3-8ecbc987696e', 'attached_at': '', 'detached_at': '', 'volume_id': '54e65dec-9bc6-4ad5-85d6-378dd519a464', 'serial': '54e65dec-9bc6-4ad5-85d6-378dd519a464'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 865.658112] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce84a90-af84-4d7c-9be2-1573dabe5cf7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.676443] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3470b7d4-2faa-4b7e-af55-75a8ee2fb273 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.680101] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05a37ad-89d7-4765-9177-0ab68f366bea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.710524] env[62109]: DEBUG nova.network.neutron [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 865.725313] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21ac01a-5cdc-4566-9f33-08067fddb1ac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.736253] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] volume-54e65dec-9bc6-4ad5-85d6-378dd519a464/volume-54e65dec-9bc6-4ad5-85d6-378dd519a464.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.737166] env[62109]: DEBUG nova.compute.manager [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 865.739624] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22e36660-a5ba-47e1-98aa-ee648b828b30 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.769117] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1b6937-ca20-453f-99c3-7cd28c560601 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.773114] env[62109]: DEBUG oslo_vmware.api [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 865.773114] env[62109]: value = "task-1116651" [ 865.773114] env[62109]: _type = "Task" [ 865.773114] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.787352] env[62109]: DEBUG nova.compute.provider_tree [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.795188] env[62109]: DEBUG oslo_vmware.api [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116651, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.805648] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 865.807568] env[62109]: INFO nova.compute.manager [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] instance snapshotting [ 865.808441] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-65f6cec0-6f4d-4cfd-b419-a4182fa08b62 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.812824] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5efcfaa0-6599-4352-a69b-b81c72c25916 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.833770] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d22635-e291-4d3b-99fb-b42c830ed386 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.836549] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 865.836549] env[62109]: value = 
"task-1116652" [ 865.836549] env[62109]: _type = "Task" [ 865.836549] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.852499] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116652, 'name': CloneVM_Task} progress is 11%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.875029] env[62109]: DEBUG nova.compute.manager [req-5b6f8a96-882b-4515-a9cd-159612ef3605 req-d4894b4c-f59f-4105-911c-93c1f81dff4c service nova] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Received event network-vif-deleted-1d0bfd82-ef3f-4da5-b19a-d4f2e10cb6a5 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.875029] env[62109]: DEBUG nova.compute.manager [req-5b6f8a96-882b-4515-a9cd-159612ef3605 req-d4894b4c-f59f-4105-911c-93c1f81dff4c service nova] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Received event network-vif-deleted-484de316-0fb7-44a9-b071-2c442b1388ed {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.889064] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116650, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.914894] env[62109]: INFO nova.compute.manager [-] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Took 1.65 seconds to deallocate network for instance. 
[ 865.943029] env[62109]: DEBUG nova.network.neutron [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating instance_info_cache with network_info: [{"id": "70216814-67e6-4c4a-80a6-94f8cf8dd246", "address": "fa:16:3e:fc:3a:44", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70216814-67", "ovs_interfaceid": "70216814-67e6-4c4a-80a6-94f8cf8dd246", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.085302] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 866.085935] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b2ed17e-ba5d-4d43-8049-6aab94da004a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.094041] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 866.094041] env[62109]: value = "task-1116653" [ 866.094041] env[62109]: _type = "Task" [ 866.094041] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.103125] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116653, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.150278] env[62109]: DEBUG nova.compute.manager [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Received event network-vif-plugged-70216814-67e6-4c4a-80a6-94f8cf8dd246 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.150579] env[62109]: DEBUG oslo_concurrency.lockutils [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] Acquiring lock "66bbe1e6-e5ee-46a0-b95c-449eef636509-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.150752] env[62109]: DEBUG oslo_concurrency.lockutils [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] Lock "66bbe1e6-e5ee-46a0-b95c-449eef636509-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.150927] env[62109]: DEBUG oslo_concurrency.lockutils [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] Lock "66bbe1e6-e5ee-46a0-b95c-449eef636509-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.151340] env[62109]: DEBUG nova.compute.manager [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] No waiting events found dispatching network-vif-plugged-70216814-67e6-4c4a-80a6-94f8cf8dd246 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 866.151621] env[62109]: WARNING nova.compute.manager [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Received unexpected event network-vif-plugged-70216814-67e6-4c4a-80a6-94f8cf8dd246 for instance with vm_state building and task_state spawning. [ 866.151912] env[62109]: DEBUG nova.compute.manager [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Received event network-changed-70216814-67e6-4c4a-80a6-94f8cf8dd246 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.152192] env[62109]: DEBUG nova.compute.manager [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Refreshing instance network info cache due to event network-changed-70216814-67e6-4c4a-80a6-94f8cf8dd246. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 866.153581] env[62109]: DEBUG oslo_concurrency.lockutils [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] Acquiring lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.261735] env[62109]: DEBUG nova.network.neutron [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Successfully updated port: a54ea2c9-4872-4e92-893b-ad7c797f25ac {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 866.285652] env[62109]: DEBUG oslo_vmware.api [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116651, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.291313] env[62109]: DEBUG nova.scheduler.client.report [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 866.351972] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 866.352344] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116652, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.352713] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e5ed6d65-a022-44c5-aa33-5c4d681a5907 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.360898] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 866.360898] env[62109]: value = "task-1116654" [ 866.360898] env[62109]: _type = "Task" [ 866.360898] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.369273] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116654, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.382345] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116650, 'name': ReconfigVM_Task, 'duration_secs': 0.88677} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.382629] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 5bea4229-6182-445e-b569-e7413ce92b93/5bea4229-6182-445e-b569-e7413ce92b93.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.383325] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ae3f6855-0451-4266-8f45-2bfcbc00e99e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.391088] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 866.391088] env[62109]: value = "task-1116655" [ 866.391088] env[62109]: _type = "Task" [ 866.391088] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.401116] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116655, 'name': Rename_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.422014] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.445395] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.445833] env[62109]: DEBUG nova.compute.manager [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Instance network_info: |[{"id": "70216814-67e6-4c4a-80a6-94f8cf8dd246", "address": "fa:16:3e:fc:3a:44", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70216814-67", "ovs_interfaceid": "70216814-67e6-4c4a-80a6-94f8cf8dd246", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 866.446196] env[62109]: DEBUG oslo_concurrency.lockutils [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] Acquired lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.446415] env[62109]: DEBUG nova.network.neutron [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Refreshing network info cache for port 70216814-67e6-4c4a-80a6-94f8cf8dd246 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 866.448065] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:3a:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6e940e5-e083-4238-973e-f1b4e2a3a5c7', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70216814-67e6-4c4a-80a6-94f8cf8dd246', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.455503] env[62109]: DEBUG oslo.service.loopingcall [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.456019] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 866.456958] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e10148fa-c29e-4044-9e90-77e70f912bcf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.478510] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.478510] env[62109]: value = "task-1116656" [ 866.478510] env[62109]: _type = "Task" [ 866.478510] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.488947] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116656, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.605621] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116653, 'name': PowerOffVM_Task, 'duration_secs': 0.375627} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.605973] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 866.606908] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008d8b5b-9cbe-4401-ae30-67d761de532f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.626992] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feeae63b-8a7c-4441-8138-06ac5db7c634 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.661907] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 866.662261] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a8477c0-65cb-42a1-9b9f-1c29b3a0cb02 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.670955] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 866.670955] env[62109]: value = "task-1116657" [ 866.670955] env[62109]: _type = "Task" [ 866.670955] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.679993] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116657, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.766578] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.766802] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.767025] env[62109]: DEBUG nova.network.neutron [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 866.769741] env[62109]: DEBUG nova.compute.manager [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 866.785887] env[62109]: DEBUG oslo_vmware.api [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116651, 'name': ReconfigVM_Task, 'duration_secs': 0.552761} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.788108] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Reconfigured VM instance instance-0000003b to attach disk [datastore1] volume-54e65dec-9bc6-4ad5-85d6-378dd519a464/volume-54e65dec-9bc6-4ad5-85d6-378dd519a464.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.793448] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb15993b-baef-48c0-a8d7-4bea2b53d8ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.806420] env[62109]: DEBUG nova.virt.hardware [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 866.806705] env[62109]: DEBUG nova.virt.hardware [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 866.806868] env[62109]: DEBUG nova.virt.hardware [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 866.807069] env[62109]: DEBUG nova.virt.hardware [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 866.807230] env[62109]: DEBUG nova.virt.hardware [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 866.807390] env[62109]: DEBUG nova.virt.hardware [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 866.807632] 
env[62109]: DEBUG nova.virt.hardware [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 866.807819] env[62109]: DEBUG nova.virt.hardware [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 866.808039] env[62109]: DEBUG nova.virt.hardware [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 866.808207] env[62109]: DEBUG nova.virt.hardware [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 866.808372] env[62109]: DEBUG nova.virt.hardware [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 866.809173] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.086s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.814859] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7556c2d7-95ea-4700-9f5f-a3a00940b5c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.815719] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.479s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.815966] env[62109]: DEBUG nova.objects.instance [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Lazy-loading 'resources' on Instance uuid 9b2968bb-ed06-4740-b43e-b4aa1fac76dd {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 866.823853] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8a78dc-8496-4e1b-a85b-9fbe5e981559 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.831646] env[62109]: DEBUG oslo_vmware.api [None 
req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 866.831646] env[62109]: value = "task-1116658" [ 866.831646] env[62109]: _type = "Task" [ 866.831646] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.847300] env[62109]: INFO nova.scheduler.client.report [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted allocations for instance c694c178-3894-4997-8e99-8f4900a64848 [ 866.848867] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.854546] env[62109]: DEBUG oslo.service.loopingcall [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.859083] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 866.865116] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a93b1e3-933f-4625-be01-56047ca0e18d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.877571] env[62109]: DEBUG oslo_vmware.api [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116658, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.885250] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116652, 'name': CloneVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.891727] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116654, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.895892] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.895892] env[62109]: value = "task-1116659" [ 866.895892] env[62109]: _type = "Task" [ 866.895892] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.902906] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116655, 'name': Rename_Task, 'duration_secs': 0.168166} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.904056] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 866.904056] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dabc124a-65a9-4027-af1a-ba3f6f10ef96 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.909302] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116659, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.911720] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 866.911720] env[62109]: value = "task-1116660" [ 866.911720] env[62109]: _type = "Task" [ 866.911720] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.920296] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116660, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.991083] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116656, 'name': CreateVM_Task, 'duration_secs': 0.355811} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.991251] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 866.991991] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.992181] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.992531] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 866.995747] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb9027e3-3ecd-45db-8af3-37ea2a8e3356 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.002297] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 867.002297] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528424e9-b86e-e1da-0b3c-7e4ce1d20ead" [ 867.002297] env[62109]: _type = "Task" [ 867.002297] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.013107] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528424e9-b86e-e1da-0b3c-7e4ce1d20ead, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.182663] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 867.182961] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.183301] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.183510] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.183737] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 867.184410] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3613c095-99df-4ded-b619-27e2a6232f06 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.195773] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.196039] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 867.196979] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-940dbb52-0480-4457-b723-ea8b6e5d1bcd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.204209] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 867.204209] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5297bad5-2d0b-c931-2cff-ef611aba4c63" [ 867.204209] env[62109]: _type = "Task" [ 867.204209] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.213503] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5297bad5-2d0b-c931-2cff-ef611aba4c63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.214419] env[62109]: DEBUG nova.network.neutron [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updated VIF entry in instance network info cache for port 70216814-67e6-4c4a-80a6-94f8cf8dd246. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 867.214756] env[62109]: DEBUG nova.network.neutron [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating instance_info_cache with network_info: [{"id": "70216814-67e6-4c4a-80a6-94f8cf8dd246", "address": "fa:16:3e:fc:3a:44", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70216814-67", "ovs_interfaceid": "70216814-67e6-4c4a-80a6-94f8cf8dd246", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.309263] env[62109]: WARNING nova.network.neutron [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] 
cfbec6c5-3421-476e-aca8-de96e0de15af already exists in list: networks containing: ['cfbec6c5-3421-476e-aca8-de96e0de15af']. ignoring it [ 867.309539] env[62109]: WARNING nova.network.neutron [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] cfbec6c5-3421-476e-aca8-de96e0de15af already exists in list: networks containing: ['cfbec6c5-3421-476e-aca8-de96e0de15af']. ignoring it [ 867.342949] env[62109]: DEBUG oslo_vmware.api [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116658, 'name': ReconfigVM_Task, 'duration_secs': 0.178606} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.345461] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244463', 'volume_id': '54e65dec-9bc6-4ad5-85d6-378dd519a464', 'name': 'volume-54e65dec-9bc6-4ad5-85d6-378dd519a464', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '342b7069-22fb-4934-9ec3-8ecbc987696e', 'attached_at': '', 'detached_at': '', 'volume_id': '54e65dec-9bc6-4ad5-85d6-378dd519a464', 'serial': '54e65dec-9bc6-4ad5-85d6-378dd519a464'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 867.361472] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116652, 'name': CloneVM_Task, 'duration_secs': 1.081403} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.362843] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Created linked-clone VM from snapshot [ 867.362843] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761bd8d8-2cf1-4673-a512-19d5c7813f87 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.375867] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Uploading image 44e4d408-7967-4020-b05c-e6a1f4131968 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 867.389031] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6754d85d-4f2c-4547-888d-1d8d3dfb58c0 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "c694c178-3894-4997-8e99-8f4900a64848" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.972s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.401012] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116654, 'name': CreateSnapshot_Task, 'duration_secs': 0.719947} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.406401] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 867.410528] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8bba8d-265d-46ca-a31b-ac2e692ceb76 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.421334] env[62109]: DEBUG oslo_vmware.rw_handles [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 867.421334] env[62109]: value = "vm-244468" [ 867.421334] env[62109]: _type = "VirtualMachine" [ 867.421334] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 867.425027] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2352dca1-dd89-4320-91fa-84a2aefa7bfe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.426841] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116659, 'name': CreateVM_Task, 'duration_secs': 0.364696} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.433287] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 867.440430] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.440430] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.440644] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 867.440961] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116660, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.443952] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84edd51f-baf8-4626-88c0-de5d21d5d12e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.447835] env[62109]: DEBUG oslo_vmware.rw_handles [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lease: (returnval){ [ 867.447835] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5275664f-bcce-d150-2092-5889c490efb2" [ 867.447835] env[62109]: _type = "HttpNfcLease" [ 867.447835] env[62109]: } obtained for exporting VM: (result){ [ 867.447835] env[62109]: value = "vm-244468" [ 867.447835] env[62109]: _type = "VirtualMachine" [ 867.447835] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 867.448838] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the lease: (returnval){ [ 867.448838] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5275664f-bcce-d150-2092-5889c490efb2" [ 867.448838] env[62109]: _type = "HttpNfcLease" [ 867.448838] env[62109]: } to be ready. 
{{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 867.458383] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 867.458383] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52289363-2f8d-4447-a7a6-91e86e96e045" [ 867.458383] env[62109]: _type = "Task" [ 867.458383] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.464127] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 867.464127] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5275664f-bcce-d150-2092-5889c490efb2" [ 867.464127] env[62109]: _type = "HttpNfcLease" [ 867.464127] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 867.467833] env[62109]: DEBUG oslo_vmware.rw_handles [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 867.467833] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5275664f-bcce-d150-2092-5889c490efb2" [ 867.467833] env[62109]: _type = "HttpNfcLease" [ 867.467833] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 867.467833] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db14ed2-ced9-4ff2-8c5b-7e8c322e2a55 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.474371] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52289363-2f8d-4447-a7a6-91e86e96e045, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.478214] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.478214] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.478214] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.483949] env[62109]: DEBUG oslo_vmware.rw_handles [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525454cf-aad7-13c3-d043-fd80213c951b/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 867.484254] env[62109]: DEBUG oslo_vmware.rw_handles [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525454cf-aad7-13c3-d043-fd80213c951b/disk-0.vmdk for reading. {{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 867.559091] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528424e9-b86e-e1da-0b3c-7e4ce1d20ead, 'name': SearchDatastore_Task, 'duration_secs': 0.016022} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.559281] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.559517] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.559747] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.559900] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.560095] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 867.560479] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d031cf77-021a-4f1c-b9bd-bb0b242e0424 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.570133] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.570340] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 867.571299] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81a88669-f931-4664-a601-681540c526f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.578516] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 867.578516] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5251de46-3573-4c5f-4850-e477d71d4f51" [ 867.578516] env[62109]: _type = "Task" [ 867.578516] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.586875] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-de1ee76b-f4fd-4a53-9af7-bf62c6e09931 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.593997] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5251de46-3573-4c5f-4850-e477d71d4f51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.716502] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5297bad5-2d0b-c931-2cff-ef611aba4c63, 'name': SearchDatastore_Task, 'duration_secs': 0.022172} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.717326] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c21f83f9-b756-4d86-9515-6be8fc85e2e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.719849] env[62109]: DEBUG oslo_concurrency.lockutils [req-191359b2-f09d-4e53-89dc-ccff967a1b11 req-5e2cb26b-21d2-42a5-947e-6e2572d641d6 service nova] Releasing lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.725115] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 867.725115] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521dc22c-e73e-8cfa-9cdf-63a12f240e3c" [ 867.725115] env[62109]: _type = "Task" [ 867.725115] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.733794] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521dc22c-e73e-8cfa-9cdf-63a12f240e3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.840282] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7197f7-6d4f-4f8c-a514-7dcfdd3cba13 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.855740] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042e8bd7-877d-41ca-a1c3-265d3b579ab8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.887905] env[62109]: DEBUG nova.network.neutron [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updating instance_info_cache with network_info: [{"id": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "address": "fa:16:3e:0c:f9:45", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa11a7ca2-70", "ovs_interfaceid": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9b48845a-ae70-493f-8ea7-542088d62859", "address": "fa:16:3e:65:6d:4f", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 
709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b48845a-ae", "ovs_interfaceid": "9b48845a-ae70-493f-8ea7-542088d62859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a54ea2c9-4872-4e92-893b-ad7c797f25ac", "address": "fa:16:3e:40:13:3b", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa54ea2c9-48", "ovs_interfaceid": "a54ea2c9-4872-4e92-893b-ad7c797f25ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.890088] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ade8fc4-99bf-4e58-bf89-4d1f9e5ab5b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.902668] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3653a88-eaa9-4129-92d6-5c7ab116309a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.919407] env[62109]: DEBUG nova.compute.provider_tree [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.930928] env[62109]: DEBUG oslo_vmware.api [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116660, 'name': PowerOnVM_Task, 'duration_secs': 0.611412} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.931356] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 867.931578] env[62109]: INFO nova.compute.manager [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Took 6.64 seconds to spawn the instance on the hypervisor. 
[ 867.931827] env[62109]: DEBUG nova.compute.manager [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 867.932969] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169539c0-79a6-4df3-a62b-aba8ad14ce78 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.950373] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 867.951091] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6b703e3a-27bc-4706-87bf-28d63938b2c8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.961595] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 867.961595] env[62109]: value = "task-1116662" [ 867.961595] env[62109]: _type = "Task" [ 867.961595] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.972329] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116662, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.050739] env[62109]: DEBUG nova.compute.manager [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Received event network-vif-plugged-a54ea2c9-4872-4e92-893b-ad7c797f25ac {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 868.051054] env[62109]: DEBUG oslo_concurrency.lockutils [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] Acquiring lock "7afbb35b-9865-40a7-8b37-d6a661a186a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.051342] env[62109]: DEBUG oslo_concurrency.lockutils [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] Lock "7afbb35b-9865-40a7-8b37-d6a661a186a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.051486] env[62109]: DEBUG oslo_concurrency.lockutils [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] Lock "7afbb35b-9865-40a7-8b37-d6a661a186a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.051744] env[62109]: DEBUG nova.compute.manager [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] No waiting events found dispatching network-vif-plugged-a54ea2c9-4872-4e92-893b-ad7c797f25ac {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 868.052025] env[62109]: WARNING nova.compute.manager [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Received unexpected event network-vif-plugged-a54ea2c9-4872-4e92-893b-ad7c797f25ac for instance with vm_state active and task_state None. [ 868.052276] env[62109]: DEBUG nova.compute.manager [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Received event network-changed-a54ea2c9-4872-4e92-893b-ad7c797f25ac {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 868.052618] env[62109]: DEBUG nova.compute.manager [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Refreshing instance network info cache due to event network-changed-a54ea2c9-4872-4e92-893b-ad7c797f25ac. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 868.052964] env[62109]: DEBUG oslo_concurrency.lockutils [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] Acquiring lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.091741] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5251de46-3573-4c5f-4850-e477d71d4f51, 'name': SearchDatastore_Task, 'duration_secs': 0.014395} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.092767] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a914e128-2b5e-48e4-9e7d-be9847978bf3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.100729] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 868.100729] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5226a2b6-0717-2b0f-632c-d90e645b6443" [ 868.100729] env[62109]: _type = "Task" [ 868.100729] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.112176] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5226a2b6-0717-2b0f-632c-d90e645b6443, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.240441] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521dc22c-e73e-8cfa-9cdf-63a12f240e3c, 'name': SearchDatastore_Task, 'duration_secs': 0.017711} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.241020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.241511] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk. 
{{(pid=62109) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 868.242040] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.242429] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.242795] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f6aff220-707c-40b8-8dba-656e77537b23 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.245808] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e54f51bb-e67c-4947-bd14-25a7df290093 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.257867] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 868.257867] env[62109]: value = "task-1116663" [ 868.257867] env[62109]: _type = "Task" [ 868.257867] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.258161] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.258333] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 868.259174] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d893149-1dcf-46f2-87fe-edfd5df92025 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.274387] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116663, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.275157] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 868.275157] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c60c8b-6f44-a160-c9aa-cb387b4debf0" [ 868.275157] env[62109]: _type = "Task" [ 868.275157] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.285636] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c60c8b-6f44-a160-c9aa-cb387b4debf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.396220] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.397181] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.397378] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.398305] env[62109]: DEBUG nova.objects.instance [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lazy-loading 'flavor' on Instance uuid 342b7069-22fb-4934-9ec3-8ecbc987696e {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 868.399711] env[62109]: DEBUG oslo_concurrency.lockutils [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] Acquired lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.399973] env[62109]: DEBUG nova.network.neutron [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Refreshing network info cache for port a54ea2c9-4872-4e92-893b-ad7c797f25ac {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 868.401954] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e447724-dcf9-499b-ae16-80bf22fb8cf5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.424148] env[62109]: DEBUG nova.virt.hardware [None 
req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 868.424532] env[62109]: DEBUG nova.virt.hardware [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 868.424754] env[62109]: DEBUG nova.virt.hardware [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 868.424983] env[62109]: DEBUG nova.virt.hardware [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 868.425196] env[62109]: DEBUG nova.virt.hardware [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 868.425438] env[62109]: DEBUG nova.virt.hardware [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 868.425795] env[62109]: DEBUG nova.virt.hardware [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 868.426044] env[62109]: DEBUG nova.virt.hardware [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 868.426296] env[62109]: DEBUG nova.virt.hardware [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 868.426472] env[62109]: DEBUG nova.virt.hardware [None 
req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 868.426752] env[62109]: DEBUG nova.virt.hardware [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 868.433462] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Reconfiguring VM to attach interface {{(pid=62109) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 868.436117] env[62109]: DEBUG nova.scheduler.client.report [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 868.439529] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f56d1716-81f1-4fdf-b682-10fc75c2d326 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.454590] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.639s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.461194] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.064s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.463344] env[62109]: INFO nova.compute.claims [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 868.470352] env[62109]: INFO nova.compute.manager [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Took 31.39 seconds to build instance. 
[ 868.479307] env[62109]: DEBUG oslo_vmware.api [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 868.479307] env[62109]: value = "task-1116664" [ 868.479307] env[62109]: _type = "Task" [ 868.479307] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.484794] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116662, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.502326] env[62109]: DEBUG oslo_vmware.api [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116664, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.503693] env[62109]: INFO nova.scheduler.client.report [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Deleted allocations for instance 9b2968bb-ed06-4740-b43e-b4aa1fac76dd [ 868.615051] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5226a2b6-0717-2b0f-632c-d90e645b6443, 'name': SearchDatastore_Task, 'duration_secs': 0.011035} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.615569] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.615991] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 66bbe1e6-e5ee-46a0-b95c-449eef636509/66bbe1e6-e5ee-46a0-b95c-449eef636509.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 868.616341] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfb4ab8f-e883-43d2-b3e4-45f3d0f2469b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.626804] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 868.626804] env[62109]: value = "task-1116665" [ 868.626804] env[62109]: _type = "Task" [ 868.626804] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.638861] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116665, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.769891] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116663, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.789024] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c60c8b-6f44-a160-c9aa-cb387b4debf0, 'name': SearchDatastore_Task, 'duration_secs': 0.015158} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.790107] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69f0f0b3-60a3-4632-8dc4-033394bdde99 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.797993] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 868.797993] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b44d45-c217-ddba-7f37-4c0aa73eff5c" [ 868.797993] env[62109]: _type = "Task" [ 868.797993] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.807466] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b44d45-c217-ddba-7f37-4c0aa73eff5c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.909239] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1dc3f83e-943b-42ae-a842-dfb406b97152 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.874s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.978601] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a9f9bc25-f54b-496c-9908-c408292c1b6c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "5bea4229-6182-445e-b569-e7413ce92b93" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.914s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.987058] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116662, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.004168] env[62109]: DEBUG oslo_vmware.api [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116664, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.013337] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f3d1fcde-b326-47ac-b285-410b82dcc8f8 tempest-ServersTestFqdnHostnames-792571634 tempest-ServersTestFqdnHostnames-792571634-project-member] Lock "9b2968bb-ed06-4740-b43e-b4aa1fac76dd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.257s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.140462] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116665, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.173144] env[62109]: DEBUG nova.network.neutron [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updated VIF entry in instance network info cache for port a54ea2c9-4872-4e92-893b-ad7c797f25ac. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 869.173804] env[62109]: DEBUG nova.network.neutron [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updating instance_info_cache with network_info: [{"id": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "address": "fa:16:3e:0c:f9:45", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa11a7ca2-70", "ovs_interfaceid": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9b48845a-ae70-493f-8ea7-542088d62859", "address": "fa:16:3e:65:6d:4f", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b48845a-ae", "ovs_interfaceid": "9b48845a-ae70-493f-8ea7-542088d62859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a54ea2c9-4872-4e92-893b-ad7c797f25ac", "address": "fa:16:3e:40:13:3b", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa54ea2c9-48", "ovs_interfaceid": "a54ea2c9-4872-4e92-893b-ad7c797f25ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.270038] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116663, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528274} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.270038] env[62109]: INFO nova.virt.vmwareapi.ds_util [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk. [ 869.270788] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e279e2a-8fe8-458a-8710-8b6e86876c91 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.302200] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.302840] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a57c74ee-054a-482f-ada5-35fecd9e4e86 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.327263] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b44d45-c217-ddba-7f37-4c0aa73eff5c, 'name': SearchDatastore_Task, 'duration_secs': 0.010741} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.329787] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.330029] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] ac068268-1243-466e-8cd5-1ee2bc248ecd/ac068268-1243-466e-8cd5-1ee2bc248ecd.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 869.330340] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 869.330340] env[62109]: value = "task-1116666" [ 869.330340] env[62109]: _type = "Task" [ 869.330340] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.330533] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cdf033cb-16d4-4fb2-84c2-585193b6d3e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.344859] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116666, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.346811] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 869.346811] env[62109]: value = "task-1116667" [ 869.346811] env[62109]: _type = "Task" [ 869.346811] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.359858] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116667, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.487334] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116662, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.497041] env[62109]: DEBUG oslo_vmware.api [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116664, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.641998] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581473} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.645930] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 66bbe1e6-e5ee-46a0-b95c-449eef636509/66bbe1e6-e5ee-46a0-b95c-449eef636509.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 869.646286] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 869.646923] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-facb1ea5-9794-4b5c-9c4e-dddb5cd5003b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.659777] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 869.659777] env[62109]: value = "task-1116668" [ 869.659777] env[62109]: _type = "Task" [ 869.659777] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.676833] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116668, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.677692] env[62109]: DEBUG oslo_concurrency.lockutils [req-d0cfeef9-b21f-4d9c-9757-39cc2e635120 req-8ad577b6-7b0b-4a00-b2a2-35f477cb87b1 service nova] Releasing lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.844900] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116666, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.861866] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116667, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495081} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.862593] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] ac068268-1243-466e-8cd5-1ee2bc248ecd/ac068268-1243-466e-8cd5-1ee2bc248ecd.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 869.863309] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 869.863787] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e8c0065-499b-400a-be5d-16971055e7de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.876027] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 869.876027] env[62109]: value = "task-1116669" [ 869.876027] env[62109]: _type = "Task" [ 869.876027] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.886410] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116669, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.981185] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a61a7c3-ae0e-462c-9c65-7a02b84cc346 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.995735] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea44c36-869d-472c-9772-6a857c8b2b6b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.999785] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116662, 'name': CloneVM_Task, 'duration_secs': 1.600369} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.000404] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Created linked-clone VM from snapshot [ 870.001805] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6aa930-6e55-4097-912b-eec389dd593c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.007762] env[62109]: DEBUG oslo_vmware.api [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116664, 'name': ReconfigVM_Task, 'duration_secs': 1.0771} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.034220] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.034451] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Reconfigured VM to attach interface {{(pid=62109) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 870.041877] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3458b1b9-b545-4251-8145-59bbf077ba3b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.045171] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Uploading image ee68a607-5d22-48e9-a6df-4918df414238 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 870.053132] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba212077-5bf6-45bf-a58f-11103b242755 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.071989] env[62109]: DEBUG nova.compute.provider_tree [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.080763] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 870.080763] env[62109]: value = "vm-244472" [ 870.080763] env[62109]: _type = "VirtualMachine" [ 870.080763] env[62109]: }. 
{{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 870.081301] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7fcb48a4-6567-41b2-a18a-e448ade4532e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.090354] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lease: (returnval){ [ 870.090354] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d713f5-573d-ad44-ffc3-0cb49cc6a668" [ 870.090354] env[62109]: _type = "HttpNfcLease" [ 870.090354] env[62109]: } obtained for exporting VM: (result){ [ 870.090354] env[62109]: value = "vm-244472" [ 870.090354] env[62109]: _type = "VirtualMachine" [ 870.090354] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 870.090737] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the lease: (returnval){ [ 870.090737] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d713f5-573d-ad44-ffc3-0cb49cc6a668" [ 870.090737] env[62109]: _type = "HttpNfcLease" [ 870.090737] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 870.098931] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 870.098931] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d713f5-573d-ad44-ffc3-0cb49cc6a668" [ 870.098931] env[62109]: _type = "HttpNfcLease" [ 870.098931] env[62109]: } is initializing. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 870.174030] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116668, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.179034} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.174030] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 870.174030] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b171aa-b300-46b5-8a0d-22477f52ebf0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.205908] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 66bbe1e6-e5ee-46a0-b95c-449eef636509/66bbe1e6-e5ee-46a0-b95c-449eef636509.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.207111] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d77a4c95-54ad-4f18-bd07-0498eac7cc5e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.228285] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 870.228285] env[62109]: value = "task-1116671" [ 870.228285] env[62109]: _type = "Task" [ 870.228285] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.237467] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116671, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.344936] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116666, 'name': ReconfigVM_Task, 'duration_secs': 0.584068} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.345276] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.346173] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65bbf107-1bfa-4594-ab19-f09fdf1eed1d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.373333] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5688ac2-4425-43ff-bc5a-0c80b1907cbd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.396103] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116669, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07507} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.397693] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 870.398056] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 870.398056] env[62109]: value = "task-1116672" [ 870.398056] env[62109]: _type = "Task" [ 870.398056] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.398753] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5b6494-cf27-4765-be90-952f04f1fb7e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.410757] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116672, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.428364] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] ac068268-1243-466e-8cd5-1ee2bc248ecd/ac068268-1243-466e-8cd5-1ee2bc248ecd.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.428748] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0b8b9b0-b666-47d6-ab3e-8c6b2696511e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.452334] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 870.452334] env[62109]: value = "task-1116673" [ 870.452334] env[62109]: _type = "Task" [ 870.452334] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.461981] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116673, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.476825] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "342b7069-22fb-4934-9ec3-8ecbc987696e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.477317] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.540294] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88dfe78b-80d9-4b36-a3e5-9ba4de203436 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-7afbb35b-9865-40a7-8b37-d6a661a186a9-a54ea2c9-4872-4e92-893b-ad7c797f25ac" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.948s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.578059] env[62109]: DEBUG nova.scheduler.client.report [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 870.603046] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 870.603046] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d713f5-573d-ad44-ffc3-0cb49cc6a668" [ 870.603046] env[62109]: _type = "HttpNfcLease" [ 870.603046] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 870.603888] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 870.603888] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d713f5-573d-ad44-ffc3-0cb49cc6a668" [ 870.603888] env[62109]: _type = "HttpNfcLease" [ 870.603888] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 870.605018] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3036d3f4-0174-455d-9216-cf93549712eb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.617107] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f3eec3-8a98-5ffa-ca74-953e6c7f4c59/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 870.617577] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f3eec3-8a98-5ffa-ca74-953e6c7f4c59/disk-0.vmdk for reading. {{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 870.740383] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116671, 'name': ReconfigVM_Task, 'duration_secs': 0.41104} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.740765] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 66bbe1e6-e5ee-46a0-b95c-449eef636509/66bbe1e6-e5ee-46a0-b95c-449eef636509.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.741531] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1785eddf-b70b-44b4-9d51-00ba7d33b1c9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.748861] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-13ec25fd-a379-4a2a-9ca0-65acfc496e6c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.752479] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 870.752479] env[62109]: value = "task-1116674" [ 870.752479] env[62109]: _type = "Task" [ 870.752479] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.763208] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116674, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.917456] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116672, 'name': ReconfigVM_Task, 'duration_secs': 0.279859} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.918439] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 870.922755] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb70fec6-05ce-4033-9302-fda879de5925 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.931523] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 870.931523] env[62109]: value = "task-1116675" [ 870.931523] env[62109]: _type = "Task" [ 870.931523] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.941529] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116675, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.963954] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116673, 'name': ReconfigVM_Task, 'duration_secs': 0.304999} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.964332] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Reconfigured VM instance instance-00000048 to attach disk [datastore2] ac068268-1243-466e-8cd5-1ee2bc248ecd/ac068268-1243-466e-8cd5-1ee2bc248ecd.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.964933] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb4b030e-c6cf-48f6-a756-6583d27929cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.974107] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 870.974107] env[62109]: value = "task-1116676" [ 870.974107] env[62109]: _type = "Task" [ 870.974107] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.982826] env[62109]: INFO nova.compute.manager [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Detaching volume 54e65dec-9bc6-4ad5-85d6-378dd519a464 [ 870.994860] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116676, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.048024] env[62109]: INFO nova.virt.block_device [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Attempting to driver detach volume 54e65dec-9bc6-4ad5-85d6-378dd519a464 from mountpoint /dev/sdb [ 871.048024] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Volume detach. 
Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 871.048024] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244463', 'volume_id': '54e65dec-9bc6-4ad5-85d6-378dd519a464', 'name': 'volume-54e65dec-9bc6-4ad5-85d6-378dd519a464', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '342b7069-22fb-4934-9ec3-8ecbc987696e', 'attached_at': '', 'detached_at': '', 'volume_id': '54e65dec-9bc6-4ad5-85d6-378dd519a464', 'serial': '54e65dec-9bc6-4ad5-85d6-378dd519a464'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 871.050518] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6372a2-bdf3-4299-86ce-f6e34ceb60cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.086949] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625e6217-a2f3-4626-9655-d5b2fb55f38d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.090466] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.629s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.091047] env[62109]: DEBUG nova.compute.manager [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 871.096371] env[62109]: DEBUG oslo_concurrency.lockutils [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.525s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.096371] env[62109]: DEBUG nova.objects.instance [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lazy-loading 'resources' on Instance uuid af3465db-fd56-458d-a499-14df3a0029f0 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 871.104433] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6161ea0e-13a5-4923-ad93-1f014aec1a26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.129338] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3aec159-a85c-42ed-9f05-25caacf9e0f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.150446] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] The volume has not been displaced from its original location: [datastore1] volume-54e65dec-9bc6-4ad5-85d6-378dd519a464/volume-54e65dec-9bc6-4ad5-85d6-378dd519a464.vmdk. No consolidation needed. {{(pid=62109) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 871.156294] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Reconfiguring VM instance instance-0000003b to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 871.157192] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7740e44f-b2bf-44ef-a1f7-5fdff7fcd5eb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.179695] env[62109]: DEBUG oslo_vmware.api [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 871.179695] env[62109]: value = "task-1116677" [ 871.179695] env[62109]: _type = "Task" [ 871.179695] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.189825] env[62109]: DEBUG oslo_vmware.api [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116677, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.268148] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116674, 'name': Rename_Task, 'duration_secs': 0.20776} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.268148] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 871.268148] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9741b574-7d1c-4d1c-bb39-1a3d75692bb4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.275499] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 871.275499] env[62109]: value = "task-1116678" [ 871.275499] env[62109]: _type = "Task" [ 871.275499] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.285913] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116678, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.447150] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116675, 'name': PowerOnVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.484846] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116676, 'name': Rename_Task, 'duration_secs': 0.174971} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.485880] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 871.485880] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-173d5324-b0e4-41a0-991b-598eed73c09c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.496499] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 871.496499] env[62109]: value = "task-1116679" [ 871.496499] env[62109]: _type = "Task" [ 871.496499] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.507697] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116679, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.598046] env[62109]: DEBUG nova.compute.utils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 871.603272] env[62109]: DEBUG nova.compute.manager [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 871.605531] env[62109]: DEBUG nova.network.neutron [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 871.667802] env[62109]: DEBUG nova.policy [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dfa8e274a59f4086bfb08cf73dac8315', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd47712667550407d8846659ec113017b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 871.694115] env[62109]: DEBUG oslo_vmware.api [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116677, 'name': ReconfigVM_Task, 'duration_secs': 0.319785} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.697482] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Reconfigured VM instance instance-0000003b to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 871.703424] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-887e1473-c9c5-4593-b423-fa3dadb708d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.722566] env[62109]: DEBUG oslo_vmware.api [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 871.722566] env[62109]: value = "task-1116680" [ 871.722566] env[62109]: _type = "Task" [ 871.722566] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.733048] env[62109]: DEBUG oslo_vmware.api [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116680, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.790904] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116678, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.948300] env[62109]: DEBUG oslo_vmware.api [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116675, 'name': PowerOnVM_Task, 'duration_secs': 0.668186} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.950335] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 871.952166] env[62109]: DEBUG nova.compute.manager [None req-4d2addaa-6027-46db-ba0c-6f403df8eeb4 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 871.953666] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e342a69d-95f9-43c5-8c4b-8f30db5fdbbb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.993655] env[62109]: DEBUG nova.network.neutron [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Successfully created port: 89be5c13-2cda-47b1-a962-1d53a02d7a5e {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.012275] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116679, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.106727] env[62109]: DEBUG nova.compute.manager [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 872.150437] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ccde9f4-0bb6-41aa-8805-52d08447c3cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.159219] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7191008f-e664-40a3-9765-c4d1c4d66390 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.195083] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b4a23a-3231-45dc-a92d-8263fdbd591c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.203747] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7bc744d-b36f-4805-a11b-acd1b0c0858f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.220705] env[62109]: DEBUG nova.compute.provider_tree [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.235213] env[62109]: DEBUG oslo_vmware.api [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116680, 'name': ReconfigVM_Task, 'duration_secs': 0.20617} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.236099] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244463', 'volume_id': '54e65dec-9bc6-4ad5-85d6-378dd519a464', 'name': 'volume-54e65dec-9bc6-4ad5-85d6-378dd519a464', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '342b7069-22fb-4934-9ec3-8ecbc987696e', 'attached_at': '', 'detached_at': '', 'volume_id': '54e65dec-9bc6-4ad5-85d6-378dd519a464', 'serial': '54e65dec-9bc6-4ad5-85d6-378dd519a464'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 872.288198] env[62109]: DEBUG oslo_vmware.api [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116678, 'name': PowerOnVM_Task, 'duration_secs': 0.680113} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.289142] env[62109]: DEBUG nova.network.neutron [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Successfully created port: a15cf7b7-f572-459b-9018-141b9c7f1da9 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.290719] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 872.290945] env[62109]: INFO nova.compute.manager [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Took 8.08 seconds to spawn the instance on the hypervisor. [ 872.291150] env[62109]: DEBUG nova.compute.manager [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 872.291988] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f940607-b094-4c04-aa3b-2634b9bff9a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.511945] env[62109]: DEBUG oslo_vmware.api [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116679, 'name': PowerOnVM_Task, 'duration_secs': 0.545841} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.512264] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 872.512496] env[62109]: INFO nova.compute.manager [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Took 5.74 seconds to spawn the instance on the hypervisor. 
[ 872.512749] env[62109]: DEBUG nova.compute.manager [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 872.513731] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569029cf-1899-4e58-8d85-09fb2da45b4b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.594457] env[62109]: DEBUG nova.network.neutron [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Successfully created port: 79c26dde-062f-4334-8ba6-683c21a284d8 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.724751] env[62109]: DEBUG nova.scheduler.client.report [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 872.802036] env[62109]: DEBUG nova.objects.instance [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lazy-loading 'flavor' on Instance uuid 342b7069-22fb-4934-9ec3-8ecbc987696e {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 872.819791] env[62109]: INFO nova.compute.manager [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Took 35.46 seconds to build instance. [ 873.037131] env[62109]: INFO nova.compute.manager [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Took 34.24 seconds to build instance. [ 873.121298] env[62109]: DEBUG nova.compute.manager [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 873.232218] env[62109]: DEBUG oslo_concurrency.lockutils [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.138s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.235208] env[62109]: DEBUG oslo_concurrency.lockutils [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.334s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.235464] env[62109]: DEBUG nova.objects.instance [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lazy-loading 'resources' on Instance uuid 55381bef-dab5-44cd-97fe-9fc75ab61d0e {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.258030] env[62109]: INFO nova.scheduler.client.report [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Deleted allocations for instance af3465db-fd56-458d-a499-14df3a0029f0 [ 873.322257] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ad75403f-5f5e-4d21-8a6f-849dfa6f5e27 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "66bbe1e6-e5ee-46a0-b95c-449eef636509" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.968s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.539172] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95360707-4f85-4803-9f21-261059c24e3c tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "ac068268-1243-466e-8cd5-1ee2bc248ecd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.602s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.766715] env[62109]: DEBUG oslo_concurrency.lockutils [None req-10c72260-0133-4324-8303-b70bb817184b tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "af3465db-fd56-458d-a499-14df3a0029f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.187s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.810315] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b27033-e365-4034-91a7-57ffc01d73f7 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.333s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.112017] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c911f06a-6830-4526-ac0f-d1a66a3fcab1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.120646] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76834d1d-28a4-49bb-8134-cfbf00c96fed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.153788] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7dbee09-b451-43be-9948-9748a33add67 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.161746] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "interface-7afbb35b-9865-40a7-8b37-d6a661a186a9-9b48845a-ae70-493f-8ea7-542088d62859" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.162255] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-7afbb35b-9865-40a7-8b37-d6a661a186a9-9b48845a-ae70-493f-8ea7-542088d62859" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.164384] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a4f3f5-5b5d-4e33-bc7b-1e8e8ac6e3ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.180453] env[62109]: DEBUG nova.compute.provider_tree [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.377511] env[62109]: DEBUG nova.network.neutron [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Successfully updated port: 89be5c13-2cda-47b1-a962-1d53a02d7a5e {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 874.669963] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.670191] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.671148] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf68566-599b-4855-9852-c60b45f7a840 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.696770] env[62109]: DEBUG nova.scheduler.client.report [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 874.701945] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade9fb73-dfda-4239-9ffb-418dc387f193 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.739572] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Reconfiguring VM to detach interface {{(pid=62109) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 874.740756] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71082a79-fb65-417b-9c34-771989accfda {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.766743] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 874.766743] env[62109]: value = "task-1116681" [ 874.766743] env[62109]: _type = "Task" [ 874.766743] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.778589] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116681, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.208391] env[62109]: DEBUG oslo_concurrency.lockutils [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.973s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.211014] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 27.016s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.235000] env[62109]: INFO nova.scheduler.client.report [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Deleted allocations for instance 55381bef-dab5-44cd-97fe-9fc75ab61d0e [ 875.278586] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116681, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.713796] env[62109]: DEBUG nova.objects.instance [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lazy-loading 'migration_context' on Instance uuid 32cccd30-278c-48b6-8855-5cd76c2da057 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 875.748837] env[62109]: DEBUG oslo_concurrency.lockutils [None req-023bd10a-770a-454f-8278-b620b8d3dc9e tempest-ListImageFiltersTestJSON-744193482 tempest-ListImageFiltersTestJSON-744193482-project-member] Lock "55381bef-dab5-44cd-97fe-9fc75ab61d0e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.956s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.778057] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116681, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.280708] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116681, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.602472] env[62109]: DEBUG nova.network.neutron [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Successfully updated port: a15cf7b7-f572-459b-9018-141b9c7f1da9 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 876.637238] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88676697-5e16-4844-9682-bf25a42bdd5a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.648181] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5217c1-2dfd-45e3-bdd5-a52b4f195b1c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.681343] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036775e3-b149-4e1d-93d3-4805b57f269a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.690297] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf56717-f72d-4b46-a237-b1f3d03deace {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.704971] env[62109]: DEBUG nova.compute.provider_tree [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.779860] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116681, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.199280] env[62109]: DEBUG nova.virt.hardware [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 877.199280] env[62109]: DEBUG nova.virt.hardware [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 877.199280] env[62109]: DEBUG nova.virt.hardware [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.199280] env[62109]: DEBUG nova.virt.hardware [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 877.199453] env[62109]: DEBUG nova.virt.hardware [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.199481] env[62109]: DEBUG nova.virt.hardware [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 877.199742] env[62109]: DEBUG nova.virt.hardware [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 877.199861] env[62109]: DEBUG nova.virt.hardware [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 877.200054] env[62109]: DEBUG nova.virt.hardware [None 
req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 877.200289] env[62109]: DEBUG nova.virt.hardware [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 877.201145] env[62109]: DEBUG nova.virt.hardware [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 877.202566] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93ed313-912e-4169-b270-4352073c920a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.207409] env[62109]: DEBUG nova.scheduler.client.report [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 877.215160] env[62109]: DEBUG oslo_vmware.rw_handles [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525454cf-aad7-13c3-d043-fd80213c951b/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 877.218054] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002d2945-cdd0-4fcf-9c25-a0d892111610 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.221798] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cc25a2-c9ff-4553-972e-88210d6cb50c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.241802] env[62109]: DEBUG oslo_vmware.rw_handles [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525454cf-aad7-13c3-d043-fd80213c951b/disk-0.vmdk is in state: ready. 
{{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 877.241948] env[62109]: ERROR oslo_vmware.rw_handles [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525454cf-aad7-13c3-d043-fd80213c951b/disk-0.vmdk due to incomplete transfer. [ 877.242210] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e413fb00-bfe5-4b00-a39b-9def4c20b46b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.253134] env[62109]: DEBUG oslo_vmware.rw_handles [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525454cf-aad7-13c3-d043-fd80213c951b/disk-0.vmdk. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 877.253391] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Uploaded image 44e4d408-7967-4020-b05c-e6a1f4131968 to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 877.258535] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 877.258535] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a6fa5f7e-4f9f-4997-a834-1dad8ce5b078 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.264431] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 877.264431] env[62109]: value = "task-1116682" [ 877.264431] env[62109]: _type = "Task" [ 877.264431] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.286236] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116681, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.286510] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116682, 'name': Destroy_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.787462] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116682, 'name': Destroy_Task} progress is 33%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.788173] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116681, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.814973] env[62109]: INFO nova.compute.manager [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Rebuilding instance [ 877.878694] env[62109]: DEBUG nova.compute.manager [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 877.880080] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718db973-cf40-4c60-8cf1-2b7c06df7f16 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.917279] env[62109]: DEBUG nova.compute.manager [req-f0350b6e-09ef-45bf-b655-b7a7de347566 req-ba718857-c71b-47c8-8ddd-9bc9ece16161 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Received event network-vif-plugged-89be5c13-2cda-47b1-a962-1d53a02d7a5e {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 877.917279] env[62109]: DEBUG oslo_concurrency.lockutils [req-f0350b6e-09ef-45bf-b655-b7a7de347566 req-ba718857-c71b-47c8-8ddd-9bc9ece16161 service nova] Acquiring lock "3e641c90-2358-4a1c-9af5-6ad96f722aba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.917279] env[62109]: DEBUG oslo_concurrency.lockutils [req-f0350b6e-09ef-45bf-b655-b7a7de347566 req-ba718857-c71b-47c8-8ddd-9bc9ece16161 service nova] Lock "3e641c90-2358-4a1c-9af5-6ad96f722aba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.917279] env[62109]: DEBUG oslo_concurrency.lockutils [req-f0350b6e-09ef-45bf-b655-b7a7de347566 req-ba718857-c71b-47c8-8ddd-9bc9ece16161 service nova] Lock "3e641c90-2358-4a1c-9af5-6ad96f722aba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.917782] env[62109]: DEBUG nova.compute.manager [req-f0350b6e-09ef-45bf-b655-b7a7de347566 req-ba718857-c71b-47c8-8ddd-9bc9ece16161 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] No waiting events found dispatching 
network-vif-plugged-89be5c13-2cda-47b1-a962-1d53a02d7a5e {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 877.917782] env[62109]: WARNING nova.compute.manager [req-f0350b6e-09ef-45bf-b655-b7a7de347566 req-ba718857-c71b-47c8-8ddd-9bc9ece16161 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Received unexpected event network-vif-plugged-89be5c13-2cda-47b1-a962-1d53a02d7a5e for instance with vm_state building and task_state spawning. [ 878.232186] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.019s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.241080] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.081s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.242879] env[62109]: INFO nova.compute.claims [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.285013] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116682, 'name': Destroy_Task, 'duration_secs': 0.654042} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.290245] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Destroyed the VM [ 878.292020] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 878.292020] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116681, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.292020] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-71ed592f-508e-4629-b787-efbacc677b05 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.302547] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 878.302547] env[62109]: value = "task-1116683" [ 878.302547] env[62109]: _type = "Task" [ 878.302547] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.312683] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116683, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.392389] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 878.393016] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c321ff6-4199-4864-a5ff-73338128381f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.402816] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 878.402816] env[62109]: value = "task-1116684" [ 878.402816] env[62109]: _type = "Task" [ 878.402816] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.414345] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116684, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.688901] env[62109]: INFO nova.compute.manager [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Unrescuing [ 878.689390] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "refresh_cache-3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.689686] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "refresh_cache-3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.689896] env[62109]: DEBUG nova.network.neutron [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 878.705851] env[62109]: DEBUG nova.network.neutron [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Successfully updated port: 79c26dde-062f-4334-8ba6-683c21a284d8 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 878.783361] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116681, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.812903] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116683, 'name': RemoveSnapshot_Task} progress is 30%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.895860] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "342b7069-22fb-4934-9ec3-8ecbc987696e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.895860] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.895860] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "342b7069-22fb-4934-9ec3-8ecbc987696e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.895860] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.895860] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.898346] env[62109]: INFO nova.compute.manager [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Terminating instance [ 878.902142] env[62109]: DEBUG nova.compute.manager [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 878.902142] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 878.902142] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5dc4952-1d12-4225-8132-121b1047a14c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.916731] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116684, 'name': PowerOffVM_Task, 'duration_secs': 0.145088} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.919854] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 878.920608] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 878.921047] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 878.921985] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d80636-23a8-4eed-a49f-2ba8020d311b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.924985] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d22e45d-bf6b-4761-8cb5-f69fba3cdcdb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.933021] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 878.933021] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a6fc497-cbfc-4d43-803b-612252a3edb4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.935824] env[62109]: DEBUG oslo_vmware.api [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 878.935824] 
env[62109]: value = "task-1116685" [ 878.935824] env[62109]: _type = "Task" [ 878.935824] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.946147] env[62109]: DEBUG oslo_vmware.api [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116685, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.972021] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 878.972021] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 878.972021] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Deleting the datastore file [datastore2] ac068268-1243-466e-8cd5-1ee2bc248ecd {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 878.972021] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7fadb9e6-e52b-4ca3-8cf8-65dd7952f9f4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.979210] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 878.979210] env[62109]: value = "task-1116687" [ 878.979210] env[62109]: _type = "Task" [ 878.979210] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.990326] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116687, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.032020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "c753a2db-d701-4508-88bd-4ebe4f32a075" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.032020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "c753a2db-d701-4508-88bd-4ebe4f32a075" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.032020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "c753a2db-d701-4508-88bd-4ebe4f32a075-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.032020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "c753a2db-d701-4508-88bd-4ebe4f32a075-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.032020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "c753a2db-d701-4508-88bd-4ebe4f32a075-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.035082] env[62109]: INFO nova.compute.manager [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Terminating instance [ 879.038951] env[62109]: DEBUG nova.compute.manager [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 879.038951] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 879.041679] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c102875-73a7-416f-81d1-4a29dc5ba674 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.052086] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 879.052277] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bc93b160-04f7-444c-a686-40143e7e2c6b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.062715] env[62109]: DEBUG oslo_vmware.api [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 879.062715] env[62109]: value = "task-1116688" [ 879.062715] env[62109]: _type = "Task" [ 879.062715] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.073383] env[62109]: DEBUG oslo_vmware.api [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116688, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.094943] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Acquiring lock "f91f4482-b18d-4883-9f6b-3bc5a386eedd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.095405] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Lock "f91f4482-b18d-4883-9f6b-3bc5a386eedd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.210353] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "refresh_cache-3e641c90-2358-4a1c-9af5-6ad96f722aba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.210586] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquired lock "refresh_cache-3e641c90-2358-4a1c-9af5-6ad96f722aba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.210798] env[62109]: DEBUG nova.network.neutron [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 879.284265] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116681, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.327057] env[62109]: DEBUG oslo_vmware.api [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116683, 'name': RemoveSnapshot_Task, 'duration_secs': 0.952498} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.327644] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 879.328558] env[62109]: INFO nova.compute.manager [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Took 16.14 seconds to snapshot the instance on the hypervisor. [ 879.453072] env[62109]: DEBUG oslo_vmware.api [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116685, 'name': PowerOffVM_Task, 'duration_secs': 0.297878} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.453422] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 879.453843] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 879.454403] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f95145f-e0ba-47b1-b4d7-658f8f43db19 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.497297] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116687, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.319637} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.497805] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 879.497951] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 879.498234] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 879.512769] env[62109]: DEBUG nova.network.neutron [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Updating instance_info_cache with network_info: [{"id": "4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6", "address": "fa:16:3e:f8:8b:04", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c8bdd11-0b", "ovs_interfaceid": "4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.580310] env[62109]: DEBUG oslo_vmware.api [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116688, 'name': PowerOffVM_Task, 'duration_secs': 0.419917} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.580600] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 879.581188] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 879.581188] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad84aa09-d972-479e-bfee-b454a5eb21b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.600476] env[62109]: DEBUG nova.compute.manager [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 879.746295] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00775a9-e7fa-48c9-91a6-33a9f315aa12 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.756524] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749c3840-fdd1-4b72-a095-8e4227d556d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.793136] env[62109]: DEBUG nova.network.neutron [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 879.801250] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef778097-5e33-4ccc-96c1-255fcb4ff38e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.809133] env[62109]: INFO nova.compute.manager [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Swapping old allocation on dict_keys(['574e9717-c25e-453d-8028-45d9e2f95398']) held by migration a7f68b5f-2580-46ec-a485-50b41584f55e for instance [ 879.818133] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116681, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.818519] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 879.821822] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 879.821822] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Deleting the datastore file [datastore1] 342b7069-22fb-4934-9ec3-8ecbc987696e {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.822164] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98bc77d0-23de-4a9f-acf8-fd6cab30ca12 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.823855] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 879.824122] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 879.824242] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Deleting the datastore file [datastore1] c753a2db-d701-4508-88bd-4ebe4f32a075 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.827703] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b5c9a1-f429-4fc5-8b17-d4bdaf8b9a32 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.832718] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-254dac91-7fe8-4485-bcc2-5fca04204603 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.852120] env[62109]: DEBUG nova.compute.provider_tree [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.854459] env[62109]: DEBUG 
oslo_vmware.api [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for the task: (returnval){ [ 879.854459] env[62109]: value = "task-1116692" [ 879.854459] env[62109]: _type = "Task" [ 879.854459] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.854827] env[62109]: DEBUG oslo_vmware.api [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 879.854827] env[62109]: value = "task-1116691" [ 879.854827] env[62109]: _type = "Task" [ 879.854827] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.871103] env[62109]: DEBUG nova.scheduler.client.report [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Overwriting current allocation {'allocations': {'574e9717-c25e-453d-8028-45d9e2f95398': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 93}}, 'project_id': '88ac845ffffe44d9a1127254f08fce91', 'user_id': '53d26e0515864175963777284ca6d342', 'consumer_generation': 1} on consumer 32cccd30-278c-48b6-8855-5cd76c2da057 {{(pid=62109) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 879.882359] env[62109]: DEBUG oslo_vmware.api [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116692, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.882872] env[62109]: DEBUG oslo_vmware.api [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116691, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.906149] env[62109]: DEBUG nova.compute.manager [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Found 3 images (rotation: 2) {{(pid=62109) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 879.906149] env[62109]: DEBUG nova.compute.manager [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Rotating out 1 backups {{(pid=62109) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4562}} [ 879.906149] env[62109]: DEBUG nova.compute.manager [None req-fdb8eb85-dc56-4fec-b550-d2d99d419156 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Deleting image 871af3c4-4325-4264-8f1d-6cfc2a52477d {{(pid=62109) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4567}} [ 880.015971] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "refresh_cache-3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.016916] env[62109]: DEBUG nova.objects.instance [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lazy-loading 'flavor' on Instance uuid 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 880.099296] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.099500] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquired lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.099729] env[62109]: DEBUG nova.network.neutron [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 880.132043] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.233199] env[62109]: DEBUG nova.compute.manager [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 
req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Received event network-changed-89be5c13-2cda-47b1-a962-1d53a02d7a5e {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 880.233199] env[62109]: DEBUG nova.compute.manager [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Refreshing instance network info cache due to event network-changed-89be5c13-2cda-47b1-a962-1d53a02d7a5e. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 880.233435] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Acquiring lock "refresh_cache-3e641c90-2358-4a1c-9af5-6ad96f722aba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.310376] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116681, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.360055] env[62109]: DEBUG nova.scheduler.client.report [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 880.399599] env[62109]: DEBUG oslo_vmware.api [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Task: {'id': task-1116692, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.243089} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.400406] env[62109]: DEBUG oslo_vmware.api [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116691, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187545} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.400406] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.400406] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 880.400559] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 880.400713] env[62109]: INFO nova.compute.manager [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Took 1.36 seconds to destroy the instance on the hypervisor. [ 880.401470] env[62109]: DEBUG oslo.service.loopingcall [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.401470] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.401470] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 880.401636] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 880.401763] env[62109]: INFO nova.compute.manager [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Took 1.50 seconds to destroy the instance on the hypervisor. 
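The two DeleteDatastoreFile_Task entries above follow the usual oslo.vmware task lifecycle visible throughout this log: the request is submitted to vCenter, the returned task is polled ("progress is 0%" via _poll_task) until it reports success with a duration_secs value, and only then does the driver mark the instance destroyed and begin network deallocation. As a rough, illustrative sketch of that polling pattern only (this is not the oslo_vmware.api implementation; TaskInfo and get_task_info below are hypothetical stand-ins for the real vSphere task objects), a minimal version in Python could look like this:

import time

class TaskInfo:
    """Hypothetical stand-in for a vSphere task's info object (illustrative only)."""
    def __init__(self, state, progress=0, error=None):
        self.state = state        # 'running', 'success', or 'error'
        self.progress = progress  # percentage, as in the "progress is N%" DEBUG lines
        self.error = error

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it succeeds, fails, or times out (sketch of the pattern in the log)."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()                      # one poll, analogous to _poll_task above
        if info.state == 'success':
            return info                             # e.g. DeleteDatastoreFile_Task completed
        if info.state == 'error':
            raise RuntimeError(f"task failed: {info.error}")
        print(f"progress is {info.progress}%")      # mirrors the repeated progress entries
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete before the timeout")

# Example run with a scripted sequence of task states:
if __name__ == "__main__":
    states = iter([TaskInfo('running', 0), TaskInfo('running', 60), TaskInfo('success', 100)])
    wait_for_task(lambda: next(states), poll_interval=0.01)

The surrounding entries show the same shape end to end: a "Waiting for the task" line when the poll loop starts, one or more "progress is N%" lines, and finally "completed successfully" with the measured duration_secs, after which the destroy path continues.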
[ 880.401986] env[62109]: DEBUG oslo.service.loopingcall [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.402204] env[62109]: DEBUG nova.compute.manager [-] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 880.402317] env[62109]: DEBUG nova.network.neutron [-] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 880.404558] env[62109]: DEBUG nova.compute.manager [-] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 880.404763] env[62109]: DEBUG nova.network.neutron [-] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 880.523145] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9c5a7c-de3d-4ee0-aec8-d644d3541e93 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.550549] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 880.556492] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38b246e8-6008-465b-8dc3-656a73463194 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.563191] env[62109]: DEBUG oslo_vmware.api [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 880.563191] env[62109]: value = "task-1116693" [ 880.563191] env[62109]: _type = "Task" [ 880.563191] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.565964] env[62109]: DEBUG nova.virt.hardware [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 880.566292] env[62109]: DEBUG nova.virt.hardware [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 880.566463] env[62109]: DEBUG nova.virt.hardware [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.566651] env[62109]: DEBUG nova.virt.hardware [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 880.566899] env[62109]: DEBUG nova.virt.hardware [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.567122] env[62109]: DEBUG nova.virt.hardware [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 880.567310] env[62109]: DEBUG nova.virt.hardware [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 880.567417] env[62109]: DEBUG nova.virt.hardware [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 880.567641] env[62109]: DEBUG nova.virt.hardware [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a 
tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 880.567928] env[62109]: DEBUG nova.virt.hardware [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 880.568151] env[62109]: DEBUG nova.virt.hardware [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 880.569085] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183874d5-be82-441e-af29-8008ab2fe02c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.585415] env[62109]: DEBUG oslo_vmware.api [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116693, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.587562] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44af88e9-bc50-4332-8850-f436e1e64fc9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.607174] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 880.613154] env[62109]: DEBUG oslo.service.loopingcall [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.613482] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 880.614102] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6f9d4f3-7cb0-457a-9f04-ccf5ebd94d76 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.634255] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 880.634255] env[62109]: value = "task-1116694" [ 880.634255] env[62109]: _type = "Task" [ 880.634255] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.642732] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116694, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.754337] env[62109]: DEBUG nova.network.neutron [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Updating instance_info_cache with network_info: [{"id": "89be5c13-2cda-47b1-a962-1d53a02d7a5e", "address": "fa:16:3e:16:bb:4c", "network": {"id": "a7845d92-168d-4641-a527-4cc32c2f029a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-46881385", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.89", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89be5c13-2c", "ovs_interfaceid": "89be5c13-2cda-47b1-a962-1d53a02d7a5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a15cf7b7-f572-459b-9018-141b9c7f1da9", "address": "fa:16:3e:71:8b:cf", "network": {"id": "502a9a4e-70c6-40ab-9aba-616ccd137a90", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-196427286", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa15cf7b7-f5", "ovs_interfaceid": "a15cf7b7-f572-459b-9018-141b9c7f1da9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "79c26dde-062f-4334-8ba6-683c21a284d8", "address": "fa:16:3e:07:70:33", "network": {"id": "a7845d92-168d-4641-a527-4cc32c2f029a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-46881385", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", 
"external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79c26dde-06", "ovs_interfaceid": "79c26dde-062f-4334-8ba6-683c21a284d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.808782] env[62109]: DEBUG oslo_vmware.api [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116681, 'name': ReconfigVM_Task, 'duration_secs': 5.865807} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.808782] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.808782] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Reconfigured VM to detach interface {{(pid=62109) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 880.872452] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.631s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.873343] env[62109]: DEBUG nova.compute.manager [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 880.877279] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 25.722s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.877279] env[62109]: DEBUG nova.objects.instance [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 881.079198] env[62109]: DEBUG oslo_vmware.api [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116693, 'name': PowerOffVM_Task, 'duration_secs': 0.358976} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.079596] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 881.084743] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Reconfiguring VM instance instance-00000040 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 881.086994] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51905036-bd12-4465-8c49-723553965d7f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.101628] env[62109]: DEBUG nova.compute.manager [req-bd5377db-5cb4-4d3b-b416-b60f42d4dce6 req-f1845701-f675-4e11-b357-2fbcd3208247 service nova] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Received event network-vif-deleted-23e271d5-4d3a-4ad9-934f-4123916de8c1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 881.102065] env[62109]: INFO nova.compute.manager [req-bd5377db-5cb4-4d3b-b416-b60f42d4dce6 req-f1845701-f675-4e11-b357-2fbcd3208247 service nova] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Neutron deleted interface 23e271d5-4d3a-4ad9-934f-4123916de8c1; detaching it from the instance and deleting it from the info cache [ 881.102065] env[62109]: DEBUG nova.network.neutron [req-bd5377db-5cb4-4d3b-b416-b60f42d4dce6 req-f1845701-f675-4e11-b357-2fbcd3208247 service nova] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.113967] env[62109]: DEBUG oslo_vmware.api [None req-1641e007-5501-47c3-a683-08306d924979 
tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 881.113967] env[62109]: value = "task-1116695" [ 881.113967] env[62109]: _type = "Task" [ 881.113967] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.125086] env[62109]: DEBUG oslo_vmware.api [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116695, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.143181] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116694, 'name': CreateVM_Task, 'duration_secs': 0.330863} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.143378] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 881.143919] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.144204] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.144583] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 881.145188] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2069785-bac1-465d-befd-a1271b8de751 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.153016] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 881.153016] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523e66d4-a5c7-52cb-f55d-641c94c29cab" [ 881.153016] env[62109]: _type = "Task" [ 881.153016] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.164206] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523e66d4-a5c7-52cb-f55d-641c94c29cab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.215097] env[62109]: DEBUG nova.network.neutron [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance_info_cache with network_info: [{"id": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "address": "fa:16:3e:f0:07:2b", "network": {"id": "4fcb7814-4de2-430d-af81-55131504c5bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2d2be1e2322b4c87945fff0cd79d3c7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde4056dc-a5", "ovs_interfaceid": "de4056dc-a527-43f0-ad81-f82e5cb00f86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.259451] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Releasing lock "refresh_cache-3e641c90-2358-4a1c-9af5-6ad96f722aba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.260921] env[62109]: DEBUG nova.compute.manager [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Instance network_info: |[{"id": "89be5c13-2cda-47b1-a962-1d53a02d7a5e", "address": "fa:16:3e:16:bb:4c", "network": {"id": "a7845d92-168d-4641-a527-4cc32c2f029a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-46881385", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.89", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": 
"nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89be5c13-2c", "ovs_interfaceid": "89be5c13-2cda-47b1-a962-1d53a02d7a5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a15cf7b7-f572-459b-9018-141b9c7f1da9", "address": "fa:16:3e:71:8b:cf", "network": {"id": "502a9a4e-70c6-40ab-9aba-616ccd137a90", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-196427286", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa15cf7b7-f5", "ovs_interfaceid": "a15cf7b7-f572-459b-9018-141b9c7f1da9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "79c26dde-062f-4334-8ba6-683c21a284d8", "address": "fa:16:3e:07:70:33", "network": {"id": "a7845d92-168d-4641-a527-4cc32c2f029a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-46881385", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79c26dde-06", "ovs_interfaceid": "79c26dde-062f-4334-8ba6-683c21a284d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 881.260921] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Acquired lock "refresh_cache-3e641c90-2358-4a1c-9af5-6ad96f722aba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.260921] env[62109]: DEBUG nova.network.neutron [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Refreshing network info cache for port 89be5c13-2cda-47b1-a962-1d53a02d7a5e {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 881.261975] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-af87d710-32b0-4968-b39d-004f3d5c9221 
tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:bb:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '02bbcead-d833-4543-bec6-fb82dfe659ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89be5c13-2cda-47b1-a962-1d53a02d7a5e', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:8b:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d650b26-c3e7-4de7-98db-5e4b816d123a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a15cf7b7-f572-459b-9018-141b9c7f1da9', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:70:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '02bbcead-d833-4543-bec6-fb82dfe659ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79c26dde-062f-4334-8ba6-683c21a284d8', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 881.276356] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Creating folder: Project (d47712667550407d8846659ec113017b). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 881.281035] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f8737217-fc98-4263-a6b7-12576f4272b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.309674] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Created folder: Project (d47712667550407d8846659ec113017b) in parent group-v244329. [ 881.309883] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Creating folder: Instances. Parent ref: group-v244474. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 881.311302] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5771002c-270b-4c7a-ae60-dcb7de0875a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.337840] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Created folder: Instances in parent group-v244474. [ 881.340407] env[62109]: DEBUG oslo.service.loopingcall [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 881.340407] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 881.340407] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a07e25e-9942-4ac8-a27c-f946f82f70f9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.371834] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 881.371834] env[62109]: value = "task-1116698" [ 881.371834] env[62109]: _type = "Task" [ 881.371834] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.385137] env[62109]: DEBUG nova.compute.utils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 881.389031] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116698, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.389553] env[62109]: DEBUG nova.compute.manager [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 881.389719] env[62109]: DEBUG nova.network.neutron [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 881.440243] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f3eec3-8a98-5ffa-ca74-953e6c7f4c59/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 881.442410] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699f406c-f3cc-4d43-bb25-11bc6e2be933 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.450481] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f3eec3-8a98-5ffa-ca74-953e6c7f4c59/disk-0.vmdk is in state: ready. 
{{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 881.450695] env[62109]: ERROR oslo_vmware.rw_handles [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f3eec3-8a98-5ffa-ca74-953e6c7f4c59/disk-0.vmdk due to incomplete transfer. [ 881.450951] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d12a3b32-b444-41b7-94eb-80418acab349 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.461732] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f3eec3-8a98-5ffa-ca74-953e6c7f4c59/disk-0.vmdk. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 881.462438] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Uploaded image ee68a607-5d22-48e9-a6df-4918df414238 to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 881.465926] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 881.466702] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9c3b0d51-bac7-46bf-a9d7-bd5d1715444d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.476762] env[62109]: DEBUG nova.policy [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '438406e9dfab4ad3b6dc44166235dd2b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d9fbfd06f2245f2b741e25fc30270b7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 881.479242] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 881.479242] env[62109]: value = "task-1116699" [ 881.479242] env[62109]: _type = "Task" [ 881.479242] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.491625] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116699, 'name': Destroy_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.515221] env[62109]: DEBUG nova.network.neutron [-] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.608805] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-69d52b0c-6ccf-42be-831d-f3267103e5f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.621836] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac74b4d4-dfbd-4862-9eb6-5d6dc03bd674 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.639482] env[62109]: DEBUG oslo_vmware.api [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116695, 'name': ReconfigVM_Task, 'duration_secs': 0.457031} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.640147] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Reconfigured VM instance instance-00000040 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 881.640471] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 881.640952] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-505e3863-9d96-4789-81a8-52b22575d09f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.658576] env[62109]: DEBUG nova.compute.manager [req-bd5377db-5cb4-4d3b-b416-b60f42d4dce6 req-f1845701-f675-4e11-b357-2fbcd3208247 service nova] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Detach interface failed, port_id=23e271d5-4d3a-4ad9-934f-4123916de8c1, reason: Instance c753a2db-d701-4508-88bd-4ebe4f32a075 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 881.664577] env[62109]: DEBUG oslo_vmware.api [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 881.664577] env[62109]: value = "task-1116700" [ 881.664577] env[62109]: _type = "Task" [ 881.664577] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.677153] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523e66d4-a5c7-52cb-f55d-641c94c29cab, 'name': SearchDatastore_Task, 'duration_secs': 0.010954} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.678169] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.678508] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 881.679169] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.679485] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.679847] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 881.684222] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b725860-44b6-4a7e-ab77-5b8f25679406 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.687318] env[62109]: DEBUG oslo_vmware.api [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116700, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.695436] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 881.695436] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 881.697944] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b31f40e-c585-482c-8104-9ae0b86359b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.704627] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 881.704627] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527ee919-4fca-0630-c0d3-7c17fec4ca62" [ 881.704627] env[62109]: _type = "Task" [ 881.704627] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.714616] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527ee919-4fca-0630-c0d3-7c17fec4ca62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.717687] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Releasing lock "refresh_cache-32cccd30-278c-48b6-8855-5cd76c2da057" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.718263] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 881.718549] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4d08cdd-73a0-4a7e-8ec0-4fbff6347e64 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.726492] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 881.726492] env[62109]: value = "task-1116701" [ 881.726492] env[62109]: _type = "Task" [ 881.726492] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.736533] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116701, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.807498] env[62109]: DEBUG nova.compute.manager [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Stashing vm_state: active {{(pid=62109) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 881.885979] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116698, 'name': CreateVM_Task, 'duration_secs': 0.500755} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.886346] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 881.887136] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.887317] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.887739] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 881.890186] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df70dc3a-6a27-452b-8882-73f1f0663cf1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.892486] env[62109]: DEBUG nova.compute.manager [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 881.899531] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a053e20-a24d-4c93-8452-ce3826979ee1 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.899531] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.419s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.899531] env[62109]: DEBUG nova.objects.instance [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Lazy-loading 'resources' on Instance uuid 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.903082] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 881.903082] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ca0d88-4d82-7ae4-599d-8f7fbaeb50cf" [ 881.903082] env[62109]: _type = "Task" [ 881.903082] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.914220] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ca0d88-4d82-7ae4-599d-8f7fbaeb50cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.999052] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116699, 'name': Destroy_Task} progress is 33%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.013307] env[62109]: DEBUG nova.network.neutron [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Successfully created port: a3f71607-a9b8-4285-bd06-1b908b502906 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 882.018135] env[62109]: INFO nova.compute.manager [-] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Took 1.62 seconds to deallocate network for instance. [ 882.021251] env[62109]: DEBUG nova.network.neutron [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Updated VIF entry in instance network info cache for port 89be5c13-2cda-47b1-a962-1d53a02d7a5e. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 882.021251] env[62109]: DEBUG nova.network.neutron [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Updating instance_info_cache with network_info: [{"id": "89be5c13-2cda-47b1-a962-1d53a02d7a5e", "address": "fa:16:3e:16:bb:4c", "network": {"id": "a7845d92-168d-4641-a527-4cc32c2f029a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-46881385", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.89", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89be5c13-2c", "ovs_interfaceid": "89be5c13-2cda-47b1-a962-1d53a02d7a5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a15cf7b7-f572-459b-9018-141b9c7f1da9", "address": "fa:16:3e:71:8b:cf", "network": {"id": "502a9a4e-70c6-40ab-9aba-616ccd137a90", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-196427286", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa15cf7b7-f5", "ovs_interfaceid": "a15cf7b7-f572-459b-9018-141b9c7f1da9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "79c26dde-062f-4334-8ba6-683c21a284d8", "address": "fa:16:3e:07:70:33", "network": {"id": "a7845d92-168d-4641-a527-4cc32c2f029a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-46881385", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", 
"segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79c26dde-06", "ovs_interfaceid": "79c26dde-062f-4334-8ba6-683c21a284d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.181403] env[62109]: DEBUG oslo_vmware.api [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116700, 'name': PowerOnVM_Task, 'duration_secs': 0.509588} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.181403] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 882.181403] env[62109]: DEBUG nova.compute.manager [None req-1641e007-5501-47c3-a683-08306d924979 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 882.181403] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc99cd2d-4e93-4059-9dc5-3a88c22e2e6d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.217068] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527ee919-4fca-0630-c0d3-7c17fec4ca62, 'name': SearchDatastore_Task, 'duration_secs': 0.014451} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.217922] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e42d4fe2-1963-47d0-a298-1968d6fdd855 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.225903] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 882.225903] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524871f2-df42-d1d6-bbc6-50750554e6b5" [ 882.225903] env[62109]: _type = "Task" [ 882.225903] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.250973] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524871f2-df42-d1d6-bbc6-50750554e6b5, 'name': SearchDatastore_Task, 'duration_secs': 0.022111} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.254643] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.254939] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] ac068268-1243-466e-8cd5-1ee2bc248ecd/ac068268-1243-466e-8cd5-1ee2bc248ecd.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 882.255810] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116701, 'name': PowerOffVM_Task, 'duration_secs': 0.34725} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.255810] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f31c8b9-3495-42ec-a6b3-1001e9cb12b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.258156] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 882.259104] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:53:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='909148fc-609f-4b07-aa86-4e8ce4ae4b3f',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-757017005',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 882.259221] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 882.259294] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Image limits 0:0:0 
{{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.259498] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 882.259657] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.259810] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 882.260034] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 882.260350] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 882.260350] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 882.260515] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 882.260689] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 882.266153] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2218df66-c481-4dd8-bceb-3ab23dfb200e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.285935] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 882.285935] env[62109]: value = "task-1116702" [ 882.285935] env[62109]: _type = "Task" [ 882.285935] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.288194] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 882.288194] env[62109]: value = "task-1116703" [ 882.288194] env[62109]: _type = "Task" [ 882.288194] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.307098] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116702, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.307646] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116703, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.336304] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.421610] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ca0d88-4d82-7ae4-599d-8f7fbaeb50cf, 'name': SearchDatastore_Task, 'duration_secs': 0.017852} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.422037] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.422368] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.422701] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.422976] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.423201] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.423524] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-218b9266-779f-42e5-bde9-5aac30a8922e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.440671] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.440917] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 882.442360] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2357f90c-70cc-436b-a823-6ba0611a7660 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.454984] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 882.454984] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526b777d-f062-8910-a81c-af5a486ef224" [ 882.454984] env[62109]: _type = "Task" [ 882.454984] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.470819] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526b777d-f062-8910-a81c-af5a486ef224, 'name': SearchDatastore_Task, 'duration_secs': 0.01183} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.475493] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-989699f3-3c33-4137-a8d3-93491b7786d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.486359] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 882.486359] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52578aa9-df8d-fcd9-8d47-16a34cdc39fb" [ 882.486359] env[62109]: _type = "Task" [ 882.486359] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.497481] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116699, 'name': Destroy_Task, 'duration_secs': 0.606704} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.497481] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Destroyed the VM [ 882.497481] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 882.498254] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a1f9ac35-a011-44cb-8cec-8276102b39ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.504621] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52578aa9-df8d-fcd9-8d47-16a34cdc39fb, 'name': SearchDatastore_Task, 'duration_secs': 0.017933} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.507211] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.507211] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 3e641c90-2358-4a1c-9af5-6ad96f722aba/3e641c90-2358-4a1c-9af5-6ad96f722aba.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 882.507211] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 882.507211] env[62109]: value = "task-1116704" [ 882.507211] env[62109]: _type = "Task" [ 882.507211] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.507211] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4e63e91-4a0c-4a79-9ecc-a3722d973d1f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.524081] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116704, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.526280] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Releasing lock "refresh_cache-3e641c90-2358-4a1c-9af5-6ad96f722aba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.526280] env[62109]: DEBUG nova.compute.manager [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Received event network-vif-plugged-a15cf7b7-f572-459b-9018-141b9c7f1da9 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.526404] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Acquiring lock "3e641c90-2358-4a1c-9af5-6ad96f722aba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.526553] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Lock "3e641c90-2358-4a1c-9af5-6ad96f722aba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.526748] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Lock "3e641c90-2358-4a1c-9af5-6ad96f722aba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.526941] env[62109]: DEBUG nova.compute.manager [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] No waiting events found dispatching network-vif-plugged-a15cf7b7-f572-459b-9018-141b9c7f1da9 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 882.527244] env[62109]: WARNING nova.compute.manager [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Received unexpected event network-vif-plugged-a15cf7b7-f572-459b-9018-141b9c7f1da9 for instance with vm_state building and task_state spawning. [ 882.527345] env[62109]: DEBUG nova.compute.manager [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Received event network-changed-a15cf7b7-f572-459b-9018-141b9c7f1da9 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.527783] env[62109]: DEBUG nova.compute.manager [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Refreshing instance network info cache due to event network-changed-a15cf7b7-f572-459b-9018-141b9c7f1da9. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 882.527783] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Acquiring lock "refresh_cache-3e641c90-2358-4a1c-9af5-6ad96f722aba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.527953] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Acquired lock "refresh_cache-3e641c90-2358-4a1c-9af5-6ad96f722aba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.528750] env[62109]: DEBUG nova.network.neutron [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Refreshing network info cache for port a15cf7b7-f572-459b-9018-141b9c7f1da9 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 882.529617] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 882.529617] env[62109]: value = "task-1116705" [ 882.529617] env[62109]: _type = "Task" [ 882.529617] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.531060] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.544028] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116705, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.565947] env[62109]: DEBUG nova.network.neutron [-] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.572233] env[62109]: DEBUG nova.compute.manager [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Received event network-vif-deleted-9b48845a-ae70-493f-8ea7-542088d62859 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.572233] env[62109]: INFO nova.compute.manager [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Neutron deleted interface 9b48845a-ae70-493f-8ea7-542088d62859; detaching it from the instance and deleting it from the info cache [ 882.572233] env[62109]: DEBUG nova.network.neutron [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updating instance_info_cache with network_info: [{"id": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "address": "fa:16:3e:0c:f9:45", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa11a7ca2-70", "ovs_interfaceid": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a54ea2c9-4872-4e92-893b-ad7c797f25ac", "address": "fa:16:3e:40:13:3b", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa54ea2c9-48", "ovs_interfaceid": 
"a54ea2c9-4872-4e92-893b-ad7c797f25ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.645225] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.645225] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.645341] env[62109]: DEBUG nova.network.neutron [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 882.814753] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116703, 'name': ReconfigVM_Task, 'duration_secs': 0.196042} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.821031] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116702, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.821031] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6c00b6-2589-4b5d-b7cf-f17c0b2eef07 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.852989] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:53:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='909148fc-609f-4b07-aa86-4e8ce4ae4b3f',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-757017005',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 882.858752] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 882.859124] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.859307] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 882.859491] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.859651] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 882.860365] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 882.860575] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 882.861327] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 882.861327] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 882.861327] env[62109]: DEBUG nova.virt.hardware [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 882.866680] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02f6c6d5-afbb-4bc4-8fa1-c32918b4eda8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.878565] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 882.878565] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52101884-66a4-7a14-688e-86b53803f4c6" [ 882.878565] env[62109]: _type = "Task" [ 882.878565] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.899300] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52101884-66a4-7a14-688e-86b53803f4c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.906700] env[62109]: DEBUG nova.compute.manager [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 882.959363] env[62109]: DEBUG nova.virt.hardware [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 882.959653] env[62109]: DEBUG nova.virt.hardware [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 882.959834] env[62109]: DEBUG nova.virt.hardware [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.960127] env[62109]: DEBUG nova.virt.hardware [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 882.960314] env[62109]: DEBUG nova.virt.hardware [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.960489] env[62109]: DEBUG nova.virt.hardware [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 882.960714] env[62109]: DEBUG nova.virt.hardware [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 882.960877] env[62109]: DEBUG nova.virt.hardware [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 882.961059] env[62109]: DEBUG nova.virt.hardware [None 
req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 882.961232] env[62109]: DEBUG nova.virt.hardware [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 882.961407] env[62109]: DEBUG nova.virt.hardware [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 882.962345] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b481f1b-a176-418b-af73-3d0e2ff2ec5b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.974227] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba1902c-e0b2-46e2-ae55-8c15a0a70dd1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.024644] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116704, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.045692] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116705, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.067789] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0842413-b40b-4093-a51a-13a55f376751 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.073543] env[62109]: INFO nova.compute.manager [-] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Took 2.67 seconds to deallocate network for instance. 
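The entries above and below follow one recurring oslo.vmware pattern: a vSphere method ending in _Task is invoked (the "Invoking VirtualMachine.PowerOffVM_Task ..." lines), the caller then blocks in wait_for_task, and _poll_task emits the "progress is N%" / "completed successfully" lines. A minimal sketch of that pattern is shown here for orientation only; the vCenter endpoint, credentials, and the power-off example are placeholder assumptions, not this deployment's actual call sites.

from oslo_vmware import api

# Placeholder endpoint and credentials. task_poll_interval controls how often
# the "progress is N%" poll lines appear; api_retry_count covers transient faults.
session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'password',
    api_retry_count=10, task_poll_interval=0.5)

def power_off(vm_ref):
    # Sends the SOAP request -- corresponds to an
    # "Invoking VirtualMachine.PowerOffVM_Task" entry.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # Polls the task object until it succeeds or raises -- corresponds to the
    # "Waiting for the task", "progress is N%" and "completed successfully" entries.
    return session.wait_for_task(task)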
[ 883.078403] env[62109]: DEBUG oslo_concurrency.lockutils [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] Acquiring lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.078629] env[62109]: DEBUG oslo_concurrency.lockutils [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] Acquired lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.082909] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29cb4427-5d04-4ccc-b641-fbd88e0339d9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.089282] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408f4bcf-a5a1-4fcf-a704-c085c83eee4c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.107570] env[62109]: DEBUG oslo_concurrency.lockutils [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] Releasing lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.107980] env[62109]: WARNING nova.compute.manager [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Detach interface failed, port_id=9b48845a-ae70-493f-8ea7-542088d62859, reason: No device with interface-id 9b48845a-ae70-493f-8ea7-542088d62859 exists on VM: nova.exception.NotFound: No device with interface-id 9b48845a-ae70-493f-8ea7-542088d62859 exists on VM [ 883.108185] env[62109]: DEBUG nova.compute.manager [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Received event network-vif-deleted-982c79dd-f148-4cf1-af9e-f0ba120b13f2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 883.108375] env[62109]: INFO nova.compute.manager [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Neutron deleted interface 982c79dd-f148-4cf1-af9e-f0ba120b13f2; detaching it from the instance and deleting it from the info cache [ 883.108555] env[62109]: DEBUG nova.network.neutron [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.136944] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.138182] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63e48633-8dc7-4a59-9a66-ae929298793c 
tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.138182] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "7afbb35b-9865-40a7-8b37-d6a661a186a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.138182] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "7afbb35b-9865-40a7-8b37-d6a661a186a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.138182] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "7afbb35b-9865-40a7-8b37-d6a661a186a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.141662] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfa72ad-977f-4532-9dae-98a74d6e6d4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.144838] env[62109]: INFO nova.compute.manager [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Terminating instance [ 883.147944] env[62109]: DEBUG nova.compute.manager [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 883.147944] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 883.151504] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03c381f-3cc3-4565-8771-e456c3f2ede2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.158624] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9d49c8-9cc4-4d6c-9044-9389c959375b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.165998] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 883.168327] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d2a6b37-6b1f-4e96-a23c-1af21492fece {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.180362] env[62109]: DEBUG nova.compute.provider_tree [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 883.183214] env[62109]: DEBUG oslo_vmware.api [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 883.183214] env[62109]: value = "task-1116706" [ 883.183214] env[62109]: _type = "Task" [ 883.183214] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.193886] env[62109]: DEBUG oslo_vmware.api [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116706, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.308044] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116702, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647875} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.310320] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] ac068268-1243-466e-8cd5-1ee2bc248ecd/ac068268-1243-466e-8cd5-1ee2bc248ecd.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 883.310320] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 883.310320] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-61ff6585-ef55-411d-92fb-e1da54beaa76 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.317684] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 883.317684] env[62109]: value = "task-1116707" [ 883.317684] env[62109]: _type = "Task" [ 883.317684] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.327706] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116707, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.394992] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52101884-66a4-7a14-688e-86b53803f4c6, 'name': SearchDatastore_Task, 'duration_secs': 0.024883} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.401960] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Reconfiguring VM instance instance-00000032 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 883.402387] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46f79b35-3a16-418e-a18f-efe801c16cef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.426072] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 883.426072] env[62109]: value = "task-1116708" [ 883.426072] env[62109]: _type = "Task" [ 883.426072] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.440698] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116708, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.524102] env[62109]: DEBUG oslo_vmware.api [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116704, 'name': RemoveSnapshot_Task, 'duration_secs': 0.947225} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.524478] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 883.525006] env[62109]: INFO nova.compute.manager [None req-0c77e22d-81bc-46fd-b1b4-d1bd93008369 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Took 17.71 seconds to snapshot the instance on the hypervisor. [ 883.547375] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116705, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.68592} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.547721] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 3e641c90-2358-4a1c-9af5-6ad96f722aba/3e641c90-2358-4a1c-9af5-6ad96f722aba.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 883.547996] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 883.548280] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81528099-82e3-4246-acf9-a8054d103272 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.556925] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 883.556925] env[62109]: value = "task-1116709" [ 883.556925] env[62109]: _type = "Task" [ 883.556925] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.557883] env[62109]: DEBUG nova.network.neutron [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Updated VIF entry in instance network info cache for port a15cf7b7-f572-459b-9018-141b9c7f1da9. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 883.558389] env[62109]: DEBUG nova.network.neutron [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Updating instance_info_cache with network_info: [{"id": "89be5c13-2cda-47b1-a962-1d53a02d7a5e", "address": "fa:16:3e:16:bb:4c", "network": {"id": "a7845d92-168d-4641-a527-4cc32c2f029a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-46881385", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.89", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89be5c13-2c", "ovs_interfaceid": "89be5c13-2cda-47b1-a962-1d53a02d7a5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a15cf7b7-f572-459b-9018-141b9c7f1da9", "address": "fa:16:3e:71:8b:cf", "network": {"id": "502a9a4e-70c6-40ab-9aba-616ccd137a90", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-196427286", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa15cf7b7-f5", "ovs_interfaceid": "a15cf7b7-f572-459b-9018-141b9c7f1da9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "79c26dde-062f-4334-8ba6-683c21a284d8", "address": "fa:16:3e:07:70:33", "network": {"id": "a7845d92-168d-4641-a527-4cc32c2f029a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-46881385", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", 
"segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79c26dde-06", "ovs_interfaceid": "79c26dde-062f-4334-8ba6-683c21a284d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.571036] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116709, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.592789] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.641665] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6215330b-c94b-406c-a17c-ad345a574ea7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.659725] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c0d36a0-a048-420c-9e17-1c43d3c3170c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.694453] env[62109]: DEBUG nova.scheduler.client.report [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 883.701132] env[62109]: DEBUG nova.compute.manager [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Detach interface failed, port_id=982c79dd-f148-4cf1-af9e-f0ba120b13f2, reason: Instance 342b7069-22fb-4934-9ec3-8ecbc987696e could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 883.701132] env[62109]: DEBUG nova.compute.manager [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Received event network-vif-deleted-a54ea2c9-4872-4e92-893b-ad7c797f25ac {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 883.701132] env[62109]: INFO nova.compute.manager [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Neutron deleted interface a54ea2c9-4872-4e92-893b-ad7c797f25ac; detaching it from the instance and deleting it from the info cache [ 883.701132] env[62109]: DEBUG nova.network.neutron [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updating instance_info_cache with network_info: [{"id": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "address": "fa:16:3e:0c:f9:45", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa11a7ca2-70", "ovs_interfaceid": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.711278] env[62109]: DEBUG oslo_vmware.api [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116706, 'name': PowerOffVM_Task, 'duration_secs': 0.328338} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.711278] env[62109]: INFO nova.network.neutron [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Port a54ea2c9-4872-4e92-893b-ad7c797f25ac from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 883.711278] env[62109]: DEBUG nova.network.neutron [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updating instance_info_cache with network_info: [{"id": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "address": "fa:16:3e:0c:f9:45", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa11a7ca2-70", "ovs_interfaceid": "a11a7ca2-7088-4194-a63f-e4a9ed75ecc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.711278] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 883.711553] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 883.711553] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5155edc9-7753-41d8-ab54-94864218b931 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.829265] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116707, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140699} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.829567] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 883.830703] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339aaa8a-5e1e-4004-b60c-f0fd4379c12c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.852243] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] ac068268-1243-466e-8cd5-1ee2bc248ecd/ac068268-1243-466e-8cd5-1ee2bc248ecd.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 883.852243] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c4c9f1b-127f-4daa-bee2-ba5169f9dfaa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.871530] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 883.871530] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 883.871530] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Deleting the datastore file [datastore2] 7afbb35b-9865-40a7-8b37-d6a661a186a9 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 883.871877] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28432bb1-651f-40d6-af1c-b2c6a3b37d26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.878405] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 883.878405] env[62109]: value = "task-1116711" [ 883.878405] env[62109]: _type = "Task" [ 883.878405] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.879929] env[62109]: DEBUG oslo_vmware.api [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 883.879929] env[62109]: value = "task-1116712" [ 883.879929] env[62109]: _type = "Task" [ 883.879929] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.891188] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116711, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.895343] env[62109]: DEBUG oslo_vmware.api [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116712, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.936551] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116708, 'name': ReconfigVM_Task, 'duration_secs': 0.240568} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.937431] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Reconfigured VM instance instance-00000032 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 883.937722] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8858121-1530-443a-8d68-e704a7e1d6b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.961602] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 32cccd30-278c-48b6-8855-5cd76c2da057/32cccd30-278c-48b6-8855-5cd76c2da057.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 883.961927] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd04e800-00e1-4b21-82a3-3239d61b88c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.982337] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 883.982337] env[62109]: value = "task-1116713" [ 883.982337] env[62109]: _type = "Task" [ 883.982337] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.992809] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116713, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.065338] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Releasing lock "refresh_cache-3e641c90-2358-4a1c-9af5-6ad96f722aba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.065338] env[62109]: DEBUG nova.compute.manager [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Received event network-vif-plugged-79c26dde-062f-4334-8ba6-683c21a284d8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.065338] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Acquiring lock "3e641c90-2358-4a1c-9af5-6ad96f722aba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.065659] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Lock "3e641c90-2358-4a1c-9af5-6ad96f722aba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.065696] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Lock "3e641c90-2358-4a1c-9af5-6ad96f722aba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.065912] env[62109]: DEBUG nova.compute.manager [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] No waiting events found dispatching network-vif-plugged-79c26dde-062f-4334-8ba6-683c21a284d8 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 884.066107] env[62109]: WARNING nova.compute.manager [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Received unexpected event network-vif-plugged-79c26dde-062f-4334-8ba6-683c21a284d8 for instance with vm_state building and task_state spawning. 
[ 884.066280] env[62109]: DEBUG nova.compute.manager [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Received event network-changed-79c26dde-062f-4334-8ba6-683c21a284d8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.066440] env[62109]: DEBUG nova.compute.manager [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Refreshing instance network info cache due to event network-changed-79c26dde-062f-4334-8ba6-683c21a284d8. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 884.066630] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Acquiring lock "refresh_cache-3e641c90-2358-4a1c-9af5-6ad96f722aba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.066766] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Acquired lock "refresh_cache-3e641c90-2358-4a1c-9af5-6ad96f722aba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.066924] env[62109]: DEBUG nova.network.neutron [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Refreshing network info cache for port 79c26dde-062f-4334-8ba6-683c21a284d8 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 884.080185] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116709, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07501} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.080185] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 884.080185] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74f0ad8-8bdd-48e4-bd30-5d98852a96b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.111281] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 3e641c90-2358-4a1c-9af5-6ad96f722aba/3e641c90-2358-4a1c-9af5-6ad96f722aba.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 884.111967] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ede2f127-cc2e-4df6-9b7a-6861b3501a1a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.133115] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 884.133115] env[62109]: value = "task-1116714" [ 884.133115] env[62109]: _type = "Task" [ 884.133115] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.147890] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116714, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.190331] env[62109]: DEBUG nova.network.neutron [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Successfully updated port: a3f71607-a9b8-4285-bd06-1b908b502906 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 884.204481] env[62109]: DEBUG oslo_concurrency.lockutils [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] Acquiring lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.205359] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.308s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.209055] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.115s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.209515] env[62109]: INFO nova.compute.claims [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 884.214526] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "refresh_cache-7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.239836] env[62109]: INFO nova.scheduler.client.report [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Deleted allocations for instance 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8 [ 884.338314] env[62109]: DEBUG nova.compute.manager [req-ab3bc41f-5a5d-43ad-a447-4a4d6f1b5695 req-18677c34-02d1-413c-adba-53c534361555 service nova] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Received event network-vif-plugged-a3f71607-a9b8-4285-bd06-1b908b502906 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.338601] env[62109]: DEBUG oslo_concurrency.lockutils [req-ab3bc41f-5a5d-43ad-a447-4a4d6f1b5695 req-18677c34-02d1-413c-adba-53c534361555 service nova] Acquiring lock "6f31405e-a766-46da-8bf9-7be37a323bf3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.338906] env[62109]: DEBUG oslo_concurrency.lockutils [req-ab3bc41f-5a5d-43ad-a447-4a4d6f1b5695 req-18677c34-02d1-413c-adba-53c534361555 service nova] 
Lock "6f31405e-a766-46da-8bf9-7be37a323bf3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.339190] env[62109]: DEBUG oslo_concurrency.lockutils [req-ab3bc41f-5a5d-43ad-a447-4a4d6f1b5695 req-18677c34-02d1-413c-adba-53c534361555 service nova] Lock "6f31405e-a766-46da-8bf9-7be37a323bf3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.339436] env[62109]: DEBUG nova.compute.manager [req-ab3bc41f-5a5d-43ad-a447-4a4d6f1b5695 req-18677c34-02d1-413c-adba-53c534361555 service nova] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] No waiting events found dispatching network-vif-plugged-a3f71607-a9b8-4285-bd06-1b908b502906 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 884.339696] env[62109]: WARNING nova.compute.manager [req-ab3bc41f-5a5d-43ad-a447-4a4d6f1b5695 req-18677c34-02d1-413c-adba-53c534361555 service nova] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Received unexpected event network-vif-plugged-a3f71607-a9b8-4285-bd06-1b908b502906 for instance with vm_state building and task_state spawning. [ 884.394775] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116711, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.398653] env[62109]: DEBUG oslo_vmware.api [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.21763} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.400884] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 884.400884] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 884.400884] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 884.400884] env[62109]: INFO nova.compute.manager [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Took 1.25 seconds to destroy the instance on the hypervisor. 
[ 884.400884] env[62109]: DEBUG oslo.service.loopingcall [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.400884] env[62109]: DEBUG nova.compute.manager [-] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 884.400884] env[62109]: DEBUG nova.network.neutron [-] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 884.424675] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "128ae6c9-1f82-4c67-83be-42cb554c2fd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.424924] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "128ae6c9-1f82-4c67-83be-42cb554c2fd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.496801] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116713, 'name': ReconfigVM_Task, 'duration_secs': 0.311331} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.497436] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 32cccd30-278c-48b6-8855-5cd76c2da057/32cccd30-278c-48b6-8855-5cd76c2da057.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 884.498173] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af98c34c-ac26-4b72-8633-9fd7b6b10744 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.521166] env[62109]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port a54ea2c9-4872-4e92-893b-ad7c797f25ac could not be found.", "detail": ""}} {{(pid=62109) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 884.521166] env[62109]: DEBUG nova.network.neutron [-] Unable to show port a54ea2c9-4872-4e92-893b-ad7c797f25ac as it no longer exists. 
{{(pid=62109) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 884.523500] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0ece6f-918d-4214-9e5b-085fc45f086f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.549133] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6aabd86-9ae8-44f8-b860-017455fcf3dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.575048] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618f81ea-773a-4087-9843-47617ffcf437 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.584807] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 884.585144] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4455a626-d789-4339-bc08-2eee46d2af75 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.594146] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 884.594146] env[62109]: value = "task-1116715" [ 884.594146] env[62109]: _type = "Task" [ 884.594146] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.604025] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116715, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.648984] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116714, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.693881] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Acquiring lock "refresh_cache-6f31405e-a766-46da-8bf9-7be37a323bf3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.694077] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Acquired lock "refresh_cache-6f31405e-a766-46da-8bf9-7be37a323bf3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.695832] env[62109]: DEBUG nova.network.neutron [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 884.718748] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3c872150-9a16-4650-9c87-db6eec6a2a9f tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-7afbb35b-9865-40a7-8b37-d6a661a186a9-9b48845a-ae70-493f-8ea7-542088d62859" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.557s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.748515] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7ec2f1f9-56c5-4ffc-9093-e2f8258cee6b tempest-ServersV294TestFqdnHostnames-185586524 tempest-ServersV294TestFqdnHostnames-185586524-project-member] Lock "2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.759s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.895744] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116711, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.930486] env[62109]: DEBUG nova.compute.manager [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 884.965172] env[62109]: DEBUG nova.network.neutron [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Updated VIF entry in instance network info cache for port 79c26dde-062f-4334-8ba6-683c21a284d8. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 884.965714] env[62109]: DEBUG nova.network.neutron [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Updating instance_info_cache with network_info: [{"id": "89be5c13-2cda-47b1-a962-1d53a02d7a5e", "address": "fa:16:3e:16:bb:4c", "network": {"id": "a7845d92-168d-4641-a527-4cc32c2f029a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-46881385", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.89", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89be5c13-2c", "ovs_interfaceid": "89be5c13-2cda-47b1-a962-1d53a02d7a5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a15cf7b7-f572-459b-9018-141b9c7f1da9", "address": "fa:16:3e:71:8b:cf", "network": {"id": "502a9a4e-70c6-40ab-9aba-616ccd137a90", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-196427286", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa15cf7b7-f5", "ovs_interfaceid": "a15cf7b7-f572-459b-9018-141b9c7f1da9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "79c26dde-062f-4334-8ba6-683c21a284d8", "address": "fa:16:3e:07:70:33", "network": {"id": "a7845d92-168d-4641-a527-4cc32c2f029a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-46881385", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", 
"segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79c26dde-06", "ovs_interfaceid": "79c26dde-062f-4334-8ba6-683c21a284d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.112308] env[62109]: DEBUG oslo_vmware.api [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116715, 'name': PowerOnVM_Task, 'duration_secs': 0.416275} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.114533] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 885.149820] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116714, 'name': ReconfigVM_Task, 'duration_secs': 0.82127} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.150540] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 3e641c90-2358-4a1c-9af5-6ad96f722aba/3e641c90-2358-4a1c-9af5-6ad96f722aba.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.151627] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee2667e3-3927-4e31-9650-0193d7c31866 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.163210] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 885.163210] env[62109]: value = "task-1116716" [ 885.163210] env[62109]: _type = "Task" [ 885.163210] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.175137] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116716, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.261435] env[62109]: DEBUG nova.network.neutron [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 885.392908] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116711, 'name': ReconfigVM_Task, 'duration_secs': 1.072555} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.393200] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Reconfigured VM instance instance-00000048 to attach disk [datastore1] ac068268-1243-466e-8cd5-1ee2bc248ecd/ac068268-1243-466e-8cd5-1ee2bc248ecd.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.395041] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d85f61a2-c088-4250-9960-d80df5b6f07a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.406041] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 885.406041] env[62109]: value = "task-1116717" [ 885.406041] env[62109]: _type = "Task" [ 885.406041] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.416144] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116717, 'name': Rename_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.460901] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.472407] env[62109]: DEBUG oslo_concurrency.lockutils [req-ff749fe8-f428-4ed0-8cfe-bd09f0503512 req-93bf09d0-b161-4616-9e99-2a7541575739 service nova] Releasing lock "refresh_cache-3e641c90-2358-4a1c-9af5-6ad96f722aba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.627751] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "1399f618-3a93-4731-a59b-f98306d6cd52" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.627898] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.628130] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "1399f618-3a93-4731-a59b-f98306d6cd52-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.628332] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.628501] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.639461] env[62109]: INFO nova.compute.manager [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Terminating instance [ 885.642877] env[62109]: DEBUG nova.compute.manager [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 
tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 885.642877] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 885.643040] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d6af6f-d9ac-4b08-9f25-54d44a4143bc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.659912] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 885.661027] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8af7039a-130b-424c-b52c-0060d80f4036 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.671442] env[62109]: DEBUG oslo_vmware.api [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 885.671442] env[62109]: value = "task-1116718" [ 885.671442] env[62109]: _type = "Task" [ 885.671442] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.684318] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116716, 'name': Rename_Task, 'duration_secs': 0.177389} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.685154] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 885.685447] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76e24d3a-ac59-42ec-b504-296ab59a8f73 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.697348] env[62109]: DEBUG oslo_vmware.api [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116718, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.707848] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 885.707848] env[62109]: value = "task-1116719" [ 885.707848] env[62109]: _type = "Task" [ 885.707848] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.719151] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116719, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.753655] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0160a2f2-e719-4277-8bae-d01a58fa8b68 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.763491] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493ea86f-28ac-472c-b5e9-5f26d82f2c88 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.804850] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5653d4d5-5da1-4cb4-8927-f0badc180d58 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.814536] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130fa03f-52e0-4834-bcad-247871bb49d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.834609] env[62109]: DEBUG nova.compute.provider_tree [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.864658] env[62109]: DEBUG nova.network.neutron [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Updating instance_info_cache with network_info: [{"id": "a3f71607-a9b8-4285-bd06-1b908b502906", "address": "fa:16:3e:2f:83:70", "network": {"id": "ca8ac22c-69ab-4900-80ef-a42c5418116a", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-260727829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d9fbfd06f2245f2b741e25fc30270b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 
457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3f71607-a9", "ovs_interfaceid": "a3f71607-a9b8-4285-bd06-1b908b502906", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.921143] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116717, 'name': Rename_Task, 'duration_secs': 0.171057} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.921143] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 885.921143] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f192ec9d-f81a-4bc9-9c84-8a8957e5246a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.928211] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 885.928211] env[62109]: value = "task-1116720" [ 885.928211] env[62109]: _type = "Task" [ 885.928211] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.939310] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116720, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.133743] env[62109]: INFO nova.compute.manager [None req-b10e6e49-5330-4870-a658-175db9ab3750 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance to original state: 'active' [ 886.189070] env[62109]: DEBUG oslo_vmware.api [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116718, 'name': PowerOffVM_Task, 'duration_secs': 0.332974} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.189394] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 886.189571] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 886.189830] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7a9418c-8bda-4c17-b1ce-e066ed924135 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.201559] env[62109]: DEBUG nova.network.neutron [-] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.221441] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116719, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.314583] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 886.316400] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 886.316400] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Deleting the datastore file [datastore1] 1399f618-3a93-4731-a59b-f98306d6cd52 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 886.316400] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4addf82c-a822-486b-ab8b-8251ab439f89 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.325476] env[62109]: DEBUG oslo_vmware.api [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 886.325476] env[62109]: value = "task-1116722" [ 886.325476] env[62109]: _type = "Task" [ 886.325476] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.337856] env[62109]: DEBUG oslo_vmware.api [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116722, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.339987] env[62109]: DEBUG nova.scheduler.client.report [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 886.369213] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Releasing lock "refresh_cache-6f31405e-a766-46da-8bf9-7be37a323bf3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.369213] env[62109]: DEBUG nova.compute.manager [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Instance network_info: |[{"id": "a3f71607-a9b8-4285-bd06-1b908b502906", "address": "fa:16:3e:2f:83:70", "network": {"id": "ca8ac22c-69ab-4900-80ef-a42c5418116a", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-260727829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d9fbfd06f2245f2b741e25fc30270b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3f71607-a9", "ovs_interfaceid": "a3f71607-a9b8-4285-bd06-1b908b502906", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 886.369213] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:83:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4df917f7-847a-4c0e-b0e3-69a52e4a1554', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3f71607-a9b8-4285-bd06-1b908b502906', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 886.378911] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Creating folder: Project (7d9fbfd06f2245f2b741e25fc30270b7). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 886.380754] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc6cf715-acac-46f8-86f9-e33469b80bfc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.390767] env[62109]: DEBUG oslo_concurrency.lockutils [None req-822ca101-55dc-415f-a7b5-b3f3d4bf3926 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "8b63f9a1-5639-48b2-b0a9-30380835bef2" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.391160] env[62109]: DEBUG oslo_concurrency.lockutils [None req-822ca101-55dc-415f-a7b5-b3f3d4bf3926 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.391383] env[62109]: DEBUG nova.compute.manager [None req-822ca101-55dc-415f-a7b5-b3f3d4bf3926 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 886.393696] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4e4dc1-daf2-42c6-aa41-efd50b6ed139 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.397810] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Created folder: Project (7d9fbfd06f2245f2b741e25fc30270b7) in parent group-v244329. [ 886.397978] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Creating folder: Instances. Parent ref: group-v244477. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 886.398776] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87c5d639-01ae-4165-84da-459ffe66a9cd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.404021] env[62109]: DEBUG nova.compute.manager [None req-822ca101-55dc-415f-a7b5-b3f3d4bf3926 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62109) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 886.405029] env[62109]: DEBUG nova.objects.instance [None req-822ca101-55dc-415f-a7b5-b3f3d4bf3926 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'flavor' on Instance uuid 8b63f9a1-5639-48b2-b0a9-30380835bef2 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.418889] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Created folder: Instances in parent group-v244477. [ 886.419274] env[62109]: DEBUG oslo.service.loopingcall [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.419567] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 886.420641] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3be35d2-ed37-44a4-9712-60a24799e782 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.456746] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116720, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.458940] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 886.458940] env[62109]: value = "task-1116725" [ 886.458940] env[62109]: _type = "Task" [ 886.458940] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.469293] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116725, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.566853] env[62109]: DEBUG nova.compute.manager [req-5f3ae341-389f-443f-b067-25976e212ea1 req-5115339b-ab3d-4d68-bf8b-1454513cc874 service nova] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Received event network-changed-a3f71607-a9b8-4285-bd06-1b908b502906 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 886.567118] env[62109]: DEBUG nova.compute.manager [req-5f3ae341-389f-443f-b067-25976e212ea1 req-5115339b-ab3d-4d68-bf8b-1454513cc874 service nova] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Refreshing instance network info cache due to event network-changed-a3f71607-a9b8-4285-bd06-1b908b502906. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 886.567400] env[62109]: DEBUG oslo_concurrency.lockutils [req-5f3ae341-389f-443f-b067-25976e212ea1 req-5115339b-ab3d-4d68-bf8b-1454513cc874 service nova] Acquiring lock "refresh_cache-6f31405e-a766-46da-8bf9-7be37a323bf3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.567742] env[62109]: DEBUG oslo_concurrency.lockutils [req-5f3ae341-389f-443f-b067-25976e212ea1 req-5115339b-ab3d-4d68-bf8b-1454513cc874 service nova] Acquired lock "refresh_cache-6f31405e-a766-46da-8bf9-7be37a323bf3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.567993] env[62109]: DEBUG nova.network.neutron [req-5f3ae341-389f-443f-b067-25976e212ea1 req-5115339b-ab3d-4d68-bf8b-1454513cc874 service nova] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Refreshing network info cache for port a3f71607-a9b8-4285-bd06-1b908b502906 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 886.707070] env[62109]: INFO nova.compute.manager [-] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Took 2.31 seconds to deallocate network for instance. [ 886.723771] env[62109]: DEBUG oslo_vmware.api [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116719, 'name': PowerOnVM_Task, 'duration_secs': 0.562984} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.723771] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 886.723771] env[62109]: INFO nova.compute.manager [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Took 13.60 seconds to spawn the instance on the hypervisor. 
[ 886.723771] env[62109]: DEBUG nova.compute.manager [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 886.724530] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89688751-5bc9-4bd5-a4f1-84239a503059 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.841414] env[62109]: DEBUG oslo_vmware.api [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116722, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209967} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.842609] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 886.843045] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 886.843253] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 886.843436] env[62109]: INFO nova.compute.manager [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Took 1.20 seconds to destroy the instance on the hypervisor. [ 886.845244] env[62109]: DEBUG oslo.service.loopingcall [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.845244] env[62109]: DEBUG nova.compute.manager [-] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 886.845244] env[62109]: DEBUG nova.network.neutron [-] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 886.846213] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.638s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.846690] env[62109]: DEBUG nova.compute.manager [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 886.849233] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.291s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.849454] env[62109]: DEBUG nova.objects.instance [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lazy-loading 'resources' on Instance uuid 5d656f91-d35f-45e1-8892-7cdacd306960 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.913917] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-822ca101-55dc-415f-a7b5-b3f3d4bf3926 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 886.913917] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f62fb2b-2f96-4a47-a40e-151888a33830 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.928236] env[62109]: DEBUG oslo_vmware.api [None req-822ca101-55dc-415f-a7b5-b3f3d4bf3926 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 886.928236] env[62109]: value = "task-1116726" [ 886.928236] env[62109]: _type = "Task" [ 886.928236] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.942748] env[62109]: DEBUG oslo_vmware.api [None req-822ca101-55dc-415f-a7b5-b3f3d4bf3926 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116726, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.954354] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116720, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.974383] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116725, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.218462] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.247969] env[62109]: INFO nova.compute.manager [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Took 44.88 seconds to build instance. [ 887.330015] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.330518] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.353135] env[62109]: DEBUG nova.compute.utils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 887.357270] env[62109]: DEBUG nova.compute.manager [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 887.357487] env[62109]: DEBUG nova.network.neutron [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 887.439032] env[62109]: DEBUG oslo_vmware.api [None req-822ca101-55dc-415f-a7b5-b3f3d4bf3926 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116726, 'name': PowerOffVM_Task, 'duration_secs': 0.309948} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.441794] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-822ca101-55dc-415f-a7b5-b3f3d4bf3926 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 887.441994] env[62109]: DEBUG nova.compute.manager [None req-822ca101-55dc-415f-a7b5-b3f3d4bf3926 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 887.443065] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0120580b-298a-4d20-8e0f-15b597a716ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.447303] env[62109]: DEBUG nova.policy [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d8b4a13b12d477ebd973d90ec11f62d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f94fd7a82dc0489597534c518365971b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 887.467696] env[62109]: DEBUG oslo_vmware.api [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116720, 'name': PowerOnVM_Task, 'duration_secs': 1.267807} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.471014] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 887.471282] env[62109]: DEBUG nova.compute.manager [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 887.475319] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eee697e-d241-4306-86df-4502b2f45363 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.488749] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116725, 'name': CreateVM_Task, 'duration_secs': 0.597767} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.488906] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 887.489821] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.490032] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.490369] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 887.490669] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6af6733-df3d-4f65-a55a-4a9cd8d641ee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.502771] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Waiting for the task: (returnval){ [ 887.502771] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c07a8f-ee05-660a-0dd3-4f36497b57ce" [ 887.502771] env[62109]: _type = "Task" [ 887.502771] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.521439] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c07a8f-ee05-660a-0dd3-4f36497b57ce, 'name': SearchDatastore_Task, 'duration_secs': 0.012667} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.524638] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.524937] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 887.525209] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.525363] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.525550] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 887.526636] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4916fbb8-ecfe-429c-aec7-6eaa8d781fd2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.538735] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 887.538735] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 887.546831] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a522320c-13f7-479b-982b-9aee57736cab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.557031] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Waiting for the task: (returnval){ [ 887.557031] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a1a361-9296-65a0-8313-e7a3f18ceb5c" [ 887.557031] env[62109]: _type = "Task" [ 887.557031] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.558026] env[62109]: DEBUG nova.network.neutron [req-5f3ae341-389f-443f-b067-25976e212ea1 req-5115339b-ab3d-4d68-bf8b-1454513cc874 service nova] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Updated VIF entry in instance network info cache for port a3f71607-a9b8-4285-bd06-1b908b502906. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 887.558467] env[62109]: DEBUG nova.network.neutron [req-5f3ae341-389f-443f-b067-25976e212ea1 req-5115339b-ab3d-4d68-bf8b-1454513cc874 service nova] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Updating instance_info_cache with network_info: [{"id": "a3f71607-a9b8-4285-bd06-1b908b502906", "address": "fa:16:3e:2f:83:70", "network": {"id": "ca8ac22c-69ab-4900-80ef-a42c5418116a", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-260727829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d9fbfd06f2245f2b741e25fc30270b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3f71607-a9", "ovs_interfaceid": "a3f71607-a9b8-4285-bd06-1b908b502906", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.570494] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a1a361-9296-65a0-8313-e7a3f18ceb5c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.753564] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af87d710-32b0-4968-b39d-004f3d5c9221 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "3e641c90-2358-4a1c-9af5-6ad96f722aba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.717s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.836054] env[62109]: DEBUG nova.compute.manager [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 887.842807] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025966c1-a46f-48b4-9d51-7f229722c463 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.859878] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67bfa8d-a938-4bbb-8936-3f5562700b42 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.862408] env[62109]: DEBUG nova.compute.manager [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 887.899147] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf01b38-7641-4cc3-8bb5-b00cb561ecdf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.908785] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041543dc-f3cf-46e9-a196-5a544df7138f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.927048] env[62109]: DEBUG nova.compute.provider_tree [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.979111] env[62109]: DEBUG oslo_concurrency.lockutils [None req-822ca101-55dc-415f-a7b5-b3f3d4bf3926 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.588s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.003165] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.011242] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.011540] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.064950] env[62109]: DEBUG oslo_concurrency.lockutils [req-5f3ae341-389f-443f-b067-25976e212ea1 req-5115339b-ab3d-4d68-bf8b-1454513cc874 service nova] Releasing lock "refresh_cache-6f31405e-a766-46da-8bf9-7be37a323bf3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.065094] env[62109]: DEBUG nova.compute.manager [req-5f3ae341-389f-443f-b067-25976e212ea1 req-5115339b-ab3d-4d68-bf8b-1454513cc874 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Received event network-vif-deleted-a11a7ca2-7088-4194-a63f-e4a9ed75ecc0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 888.072290] env[62109]: DEBUG oslo_vmware.api [None 
req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a1a361-9296-65a0-8313-e7a3f18ceb5c, 'name': SearchDatastore_Task, 'duration_secs': 0.012532} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.073131] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20ee250f-e212-4d83-a363-7676e8782524 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.079620] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Waiting for the task: (returnval){ [ 888.079620] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c3ea7d-4e17-086d-f1ce-2d22728d6740" [ 888.079620] env[62109]: _type = "Task" [ 888.079620] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.091341] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c3ea7d-4e17-086d-f1ce-2d22728d6740, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.092261] env[62109]: DEBUG nova.network.neutron [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Successfully created port: 00cdfb69-948b-48cf-9c2a-107bb3d177c0 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 888.266365] env[62109]: DEBUG nova.compute.manager [req-f5e731fa-0c68-401d-81eb-2d72393e25e8 req-904fa2ac-6500-4dc9-80bb-6e42ef4ef9ef service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Received event network-vif-deleted-76f15b7e-4103-4568-8042-248ee15513dc {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 888.266593] env[62109]: INFO nova.compute.manager [req-f5e731fa-0c68-401d-81eb-2d72393e25e8 req-904fa2ac-6500-4dc9-80bb-6e42ef4ef9ef service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Neutron deleted interface 76f15b7e-4103-4568-8042-248ee15513dc; detaching it from the instance and deleting it from the info cache [ 888.266755] env[62109]: DEBUG nova.network.neutron [req-f5e731fa-0c68-401d-81eb-2d72393e25e8 req-904fa2ac-6500-4dc9-80bb-6e42ef4ef9ef service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.361980] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.403507] env[62109]: DEBUG nova.network.neutron 
[-] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.430279] env[62109]: DEBUG nova.scheduler.client.report [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 888.516938] env[62109]: DEBUG nova.compute.manager [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 888.593753] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c3ea7d-4e17-086d-f1ce-2d22728d6740, 'name': SearchDatastore_Task, 'duration_secs': 0.018796} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.594185] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.594486] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 6f31405e-a766-46da-8bf9-7be37a323bf3/6f31405e-a766-46da-8bf9-7be37a323bf3.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 888.595350] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-61f6a5b4-9101-4ecd-a864-162c9e8e1b02 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.604017] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Waiting for the task: (returnval){ [ 888.604017] env[62109]: value = "task-1116727" [ 888.604017] env[62109]: _type = "Task" [ 888.604017] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.616277] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116727, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.772083] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea100739-8d5b-4c14-82ba-0c499da1b26f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.782090] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c90e671-dc36-4cd3-ba04-c3cb0208fc89 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.826831] env[62109]: DEBUG nova.compute.manager [req-f5e731fa-0c68-401d-81eb-2d72393e25e8 req-904fa2ac-6500-4dc9-80bb-6e42ef4ef9ef service nova] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Detach interface failed, port_id=76f15b7e-4103-4568-8042-248ee15513dc, reason: Instance 1399f618-3a93-4731-a59b-f98306d6cd52 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 888.872668] env[62109]: DEBUG nova.compute.manager [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 888.904378] env[62109]: DEBUG nova.virt.hardware [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 888.904555] env[62109]: DEBUG nova.virt.hardware [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 888.905086] env[62109]: DEBUG nova.virt.hardware [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.905086] env[62109]: DEBUG nova.virt.hardware [None 
req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 888.905086] env[62109]: DEBUG nova.virt.hardware [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.905246] env[62109]: DEBUG nova.virt.hardware [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 888.905381] env[62109]: DEBUG nova.virt.hardware [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 888.905548] env[62109]: DEBUG nova.virt.hardware [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 888.905724] env[62109]: DEBUG nova.virt.hardware [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 888.905894] env[62109]: DEBUG nova.virt.hardware [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 888.906312] env[62109]: DEBUG nova.virt.hardware [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 888.906770] env[62109]: INFO nova.compute.manager [-] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Took 2.06 seconds to deallocate network for instance. 
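The "Invoking <X>_Task" / "Waiting for the task" / "progress is N%" triplets that dominate this section come from oslo.vmware's session helper: Nova invokes a long-running vSphere method (SearchDatastore_Task, CopyVirtualDisk_Task, PowerOffVM_Task, ...) and then polls the returned task until vCenter reports completion. Below is a minimal, self-contained sketch of that pattern, not Nova's actual code path; the vCenter host, credentials, and retry/poll settings are placeholders, and only the instance UUID is taken from the surrounding entries.

    from oslo_vmware import api as vmware_api

    # Placeholder connection details; in Nova these come from the [vmware]
    # section of nova.conf, not from this log.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Locate the VM by instance UUID (the SearchIndex.FindAllByUuid call seen above).
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='3e641c90-2358-4a1c-9af5-6ad96f722aba',  # instance UUID from this log
        vmSearch=True, instanceUuid=True)

    if vm_refs:
        # Start the long-running vSphere task ...
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_refs[0])
        # ... then block until it finishes; wait_for_task() polls the task object,
        # which is what produces the "Task: {...} progress is N%" DEBUG lines here.
        session.wait_for_task(task)

The interleaved "Acquiring lock ... / Lock ... acquired / released" records follow the same shape for every instance build and teardown: they are emitted by oslo_concurrency's lockutils wrapper around the compute manager's per-instance and compute_resources critical sections.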
[ 888.907625] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4235a7e-4fa9-4a8e-98a2-f1c949961e02 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.920021] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0912a0c-dd39-4d45-afa0-0012d233c82d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.939472] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.090s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.947060] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.544s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.947060] env[62109]: INFO nova.compute.claims [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 888.964304] env[62109]: INFO nova.scheduler.client.report [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleted allocations for instance 5d656f91-d35f-45e1-8892-7cdacd306960 [ 889.041570] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.115936] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116727, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.328379] env[62109]: DEBUG oslo_concurrency.lockutils [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "3e641c90-2358-4a1c-9af5-6ad96f722aba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.328749] env[62109]: DEBUG oslo_concurrency.lockutils [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "3e641c90-2358-4a1c-9af5-6ad96f722aba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.329056] env[62109]: DEBUG oslo_concurrency.lockutils [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "3e641c90-2358-4a1c-9af5-6ad96f722aba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.329369] env[62109]: DEBUG oslo_concurrency.lockutils [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "3e641c90-2358-4a1c-9af5-6ad96f722aba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.329567] env[62109]: DEBUG oslo_concurrency.lockutils [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "3e641c90-2358-4a1c-9af5-6ad96f722aba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.332095] env[62109]: INFO nova.compute.manager [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Terminating instance [ 889.334102] env[62109]: DEBUG nova.compute.manager [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 889.334394] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 889.335439] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06dabe7c-c3f0-45a2-98e2-72e46aedca73 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.343804] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 889.344137] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b3e8696-4ad4-44f3-94af-d695c8b33040 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.353290] env[62109]: DEBUG oslo_vmware.api [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 889.353290] env[62109]: value = "task-1116728" [ 889.353290] env[62109]: _type = "Task" [ 889.353290] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.370229] env[62109]: DEBUG oslo_vmware.api [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116728, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.420365] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.451591] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "32cccd30-278c-48b6-8855-5cd76c2da057" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.451591] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "32cccd30-278c-48b6-8855-5cd76c2da057" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.451591] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "32cccd30-278c-48b6-8855-5cd76c2da057-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.451591] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "32cccd30-278c-48b6-8855-5cd76c2da057-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.451591] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "32cccd30-278c-48b6-8855-5cd76c2da057-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.462254] env[62109]: INFO nova.compute.manager [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Terminating instance [ 889.463427] env[62109]: DEBUG nova.compute.manager [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 889.463663] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 889.464571] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae210d1d-12ee-46f6-a40b-b723434433ab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.478201] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 889.478645] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 889.479195] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bf4516ec-0442-4997-becd-b657e0310d56 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5d656f91-d35f-45e1-8892-7cdacd306960" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.513s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.480112] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 889.482875] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-147cce5c-bd8d-4322-8003-7d4f32790c49 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.490426] env[62109]: DEBUG oslo_vmware.api [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 889.490426] env[62109]: value = "task-1116729" [ 889.490426] env[62109]: _type = "Task" [ 889.490426] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.501214] env[62109]: DEBUG oslo_vmware.api [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116729, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.603429] env[62109]: DEBUG nova.compute.manager [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Stashing vm_state: stopped {{(pid=62109) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 889.618714] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116727, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.717582} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.619220] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 6f31405e-a766-46da-8bf9-7be37a323bf3/6f31405e-a766-46da-8bf9-7be37a323bf3.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 889.619615] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 889.620054] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bfab2f10-1b22-407b-a6c5-60896e565abd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.630330] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Waiting for the task: (returnval){ [ 889.630330] env[62109]: value = "task-1116730" [ 889.630330] env[62109]: _type = "Task" [ 889.630330] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.642876] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116730, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.810680] env[62109]: DEBUG nova.network.neutron [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Successfully updated port: 00cdfb69-948b-48cf-9c2a-107bb3d177c0 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 889.866041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "ac068268-1243-466e-8cd5-1ee2bc248ecd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.866329] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "ac068268-1243-466e-8cd5-1ee2bc248ecd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.866540] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "ac068268-1243-466e-8cd5-1ee2bc248ecd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.866723] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "ac068268-1243-466e-8cd5-1ee2bc248ecd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.866892] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "ac068268-1243-466e-8cd5-1ee2bc248ecd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.868524] env[62109]: DEBUG oslo_vmware.api [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116728, 'name': PowerOffVM_Task, 'duration_secs': 0.289567} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.868975] env[62109]: INFO nova.compute.manager [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Terminating instance [ 889.870351] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 889.870533] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 889.870962] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "refresh_cache-ac068268-1243-466e-8cd5-1ee2bc248ecd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.871133] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquired lock "refresh_cache-ac068268-1243-466e-8cd5-1ee2bc248ecd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.871296] env[62109]: DEBUG nova.network.neutron [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 889.872199] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b6bbaa5-13db-4126-93c7-e9c02df40675 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.999217] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 889.999513] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 890.011922] env[62109]: DEBUG oslo_vmware.api [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116729, 'name': PowerOffVM_Task, 'duration_secs': 0.23977} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.012293] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 890.012476] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 890.012918] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-114651b2-0b95-425e-b7fb-8fce99d50351 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.025165] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 890.025478] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 890.025687] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Deleting the datastore file [datastore2] 3e641c90-2358-4a1c-9af5-6ad96f722aba {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 890.026040] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f6539e1-5d1f-49c2-904b-9c5a14da762e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.036181] env[62109]: DEBUG oslo_vmware.api [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 890.036181] env[62109]: value = "task-1116733" [ 890.036181] env[62109]: _type = "Task" [ 890.036181] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.046364] env[62109]: DEBUG oslo_vmware.api [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116733, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.095248] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 890.095496] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 890.095690] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Deleting the datastore file [datastore1] 32cccd30-278c-48b6-8855-5cd76c2da057 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 890.095997] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30e7fe9e-90fe-4c6f-a7d5-d53c3dcc4bc0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.103107] env[62109]: DEBUG oslo_vmware.api [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for the task: (returnval){ [ 890.103107] env[62109]: value = "task-1116734" [ 890.103107] env[62109]: _type = "Task" [ 890.103107] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.117618] env[62109]: DEBUG oslo_vmware.api [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116734, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.132601] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.146716] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116730, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110968} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.150121] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 890.151195] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146a4a41-bc06-4837-866a-dc432d44546c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.177341] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 6f31405e-a766-46da-8bf9-7be37a323bf3/6f31405e-a766-46da-8bf9-7be37a323bf3.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 890.180276] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b925b280-57f6-4cc2-86d8-9ba870b594d9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.205018] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Waiting for the task: (returnval){ [ 890.205018] env[62109]: value = "task-1116735" [ 890.205018] env[62109]: _type = "Task" [ 890.205018] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.213107] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116735, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.313239] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "refresh_cache-448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.313409] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "refresh_cache-448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.313583] env[62109]: DEBUG nova.network.neutron [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 890.392761] env[62109]: DEBUG nova.network.neutron [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 890.416443] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d9d4c1-1e96-4ce4-9e40-69f3d089bb66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.431867] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb6e973-128f-4458-84e1-fb5b670e63b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.474260] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaba0927-0d7c-420b-ad6d-04a5ba6c7b66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.478400] env[62109]: DEBUG nova.network.neutron [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.487045] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe3f507-620b-4be3-8190-b5da9dda690e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.495079] env[62109]: DEBUG nova.compute.manager [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 service nova] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Received event network-vif-plugged-00cdfb69-948b-48cf-9c2a-107bb3d177c0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 890.495309] env[62109]: DEBUG oslo_concurrency.lockutils [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 
service nova] Acquiring lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.495518] env[62109]: DEBUG oslo_concurrency.lockutils [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 service nova] Lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.495699] env[62109]: DEBUG oslo_concurrency.lockutils [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 service nova] Lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.495894] env[62109]: DEBUG nova.compute.manager [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 service nova] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] No waiting events found dispatching network-vif-plugged-00cdfb69-948b-48cf-9c2a-107bb3d177c0 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 890.496142] env[62109]: WARNING nova.compute.manager [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 service nova] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Received unexpected event network-vif-plugged-00cdfb69-948b-48cf-9c2a-107bb3d177c0 for instance with vm_state building and task_state spawning. [ 890.496297] env[62109]: DEBUG nova.compute.manager [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 service nova] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Received event network-changed-00cdfb69-948b-48cf-9c2a-107bb3d177c0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 890.496458] env[62109]: DEBUG nova.compute.manager [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 service nova] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Refreshing instance network info cache due to event network-changed-00cdfb69-948b-48cf-9c2a-107bb3d177c0. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 890.496646] env[62109]: DEBUG oslo_concurrency.lockutils [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 service nova] Acquiring lock "refresh_cache-448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.511378] env[62109]: DEBUG nova.compute.provider_tree [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.548495] env[62109]: DEBUG oslo_vmware.api [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116733, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.615459] env[62109]: DEBUG oslo_vmware.api [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Task: {'id': task-1116734, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.463582} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.616114] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 890.616228] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 890.616342] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 890.616520] env[62109]: INFO nova.compute.manager [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Took 1.15 seconds to destroy the instance on the hypervisor. [ 890.616765] env[62109]: DEBUG oslo.service.loopingcall [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.616968] env[62109]: DEBUG nova.compute.manager [-] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 890.617074] env[62109]: DEBUG nova.network.neutron [-] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 890.715638] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116735, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.887505] env[62109]: DEBUG nova.network.neutron [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 890.983371] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Releasing lock "refresh_cache-ac068268-1243-466e-8cd5-1ee2bc248ecd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.983896] env[62109]: DEBUG nova.compute.manager [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 890.984159] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 890.985137] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85376f1-7f0c-495d-9abe-09120e239538 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.995782] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 890.996150] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e30eb452-39fd-4fee-b4bc-1b3f8016151f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.003816] env[62109]: DEBUG oslo_vmware.api [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 891.003816] env[62109]: value = "task-1116736" [ 891.003816] env[62109]: _type = "Task" [ 891.003816] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.028062] env[62109]: DEBUG nova.scheduler.client.report [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 891.028062] env[62109]: DEBUG oslo_vmware.api [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116736, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.052263] env[62109]: DEBUG oslo_vmware.api [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116733, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.538599} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.053357] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 891.053357] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 891.053357] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 891.055316] env[62109]: INFO nova.compute.manager [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Took 1.72 seconds to destroy the instance on the hypervisor. [ 891.055316] env[62109]: DEBUG oslo.service.loopingcall [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 891.055316] env[62109]: DEBUG nova.compute.manager [-] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 891.055316] env[62109]: DEBUG nova.network.neutron [-] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 891.211341] env[62109]: DEBUG nova.network.neutron [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Updating instance_info_cache with network_info: [{"id": "00cdfb69-948b-48cf-9c2a-107bb3d177c0", "address": "fa:16:3e:ff:c4:52", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00cdfb69-94", "ovs_interfaceid": "00cdfb69-948b-48cf-9c2a-107bb3d177c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.219750] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116735, 'name': ReconfigVM_Task, 'duration_secs': 0.61018} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.220300] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 6f31405e-a766-46da-8bf9-7be37a323bf3/6f31405e-a766-46da-8bf9-7be37a323bf3.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.221229] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fcfa800c-0212-4197-8b12-3ad1e66b0045 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.231703] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Waiting for the task: (returnval){ [ 891.231703] env[62109]: value = "task-1116737" [ 891.231703] env[62109]: _type = "Task" [ 891.231703] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.248817] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116737, 'name': Rename_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.514925] env[62109]: DEBUG oslo_vmware.api [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116736, 'name': PowerOffVM_Task, 'duration_secs': 0.127646} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.515299] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 891.515471] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 891.515729] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb62ffcc-6428-4e48-bb02-e6013ed6f109 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.530665] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.533372] env[62109]: DEBUG nova.compute.manager [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 891.535481] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.797s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.535721] env[62109]: DEBUG nova.objects.instance [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lazy-loading 'resources' on Instance uuid b1321874-8f97-4444-9f9c-d586d51a9e92 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 891.547805] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 891.548247] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 891.548509] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Deleting the datastore file [datastore1] ac068268-1243-466e-8cd5-1ee2bc248ecd {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.548819] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34d38cd1-c679-4e13-bd87-401bbfb76435 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.559210] env[62109]: DEBUG oslo_vmware.api [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 891.559210] env[62109]: value = "task-1116739" [ 891.559210] env[62109]: _type = "Task" [ 891.559210] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.560200] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "refresh_cache-c753a2db-d701-4508-88bd-4ebe4f32a075" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.560350] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquired lock "refresh_cache-c753a2db-d701-4508-88bd-4ebe4f32a075" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.560954] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Forcefully refreshing network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 891.574007] env[62109]: DEBUG oslo_vmware.api [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116739, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.648480] env[62109]: DEBUG nova.network.neutron [-] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.715245] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "refresh_cache-448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.715629] env[62109]: DEBUG nova.compute.manager [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Instance network_info: |[{"id": "00cdfb69-948b-48cf-9c2a-107bb3d177c0", "address": "fa:16:3e:ff:c4:52", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00cdfb69-94", "ovs_interfaceid": "00cdfb69-948b-48cf-9c2a-107bb3d177c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 891.716029] env[62109]: DEBUG 
oslo_concurrency.lockutils [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 service nova] Acquired lock "refresh_cache-448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.716259] env[62109]: DEBUG nova.network.neutron [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 service nova] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Refreshing network info cache for port 00cdfb69-948b-48cf-9c2a-107bb3d177c0 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 891.718757] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:c4:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7cd4cea-788c-4e6d-9df8-5d83838e2e2a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00cdfb69-948b-48cf-9c2a-107bb3d177c0', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 891.729377] env[62109]: DEBUG oslo.service.loopingcall [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 891.730840] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 891.731164] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aafc149e-abc3-4ae4-b1e7-f68bee16a87c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.757129] env[62109]: DEBUG nova.compute.manager [req-5feed897-9fa7-4d96-9a68-e6743d086e2b req-2b3a555b-85f0-4349-93c2-5ac0da449b4c service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Received event network-vif-deleted-79c26dde-062f-4334-8ba6-683c21a284d8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 891.757129] env[62109]: INFO nova.compute.manager [req-5feed897-9fa7-4d96-9a68-e6743d086e2b req-2b3a555b-85f0-4349-93c2-5ac0da449b4c service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Neutron deleted interface 79c26dde-062f-4334-8ba6-683c21a284d8; detaching it from the instance and deleting it from the info cache [ 891.757668] env[62109]: DEBUG nova.network.neutron [req-5feed897-9fa7-4d96-9a68-e6743d086e2b req-2b3a555b-85f0-4349-93c2-5ac0da449b4c service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Updating instance_info_cache with network_info: [{"id": "89be5c13-2cda-47b1-a962-1d53a02d7a5e", "address": "fa:16:3e:16:bb:4c", "network": {"id": "a7845d92-168d-4641-a527-4cc32c2f029a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-46881385", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.89", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89be5c13-2c", "ovs_interfaceid": "89be5c13-2cda-47b1-a962-1d53a02d7a5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a15cf7b7-f572-459b-9018-141b9c7f1da9", "address": "fa:16:3e:71:8b:cf", "network": {"id": "502a9a4e-70c6-40ab-9aba-616ccd137a90", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-196427286", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa15cf7b7-f5", "ovs_interfaceid": "a15cf7b7-f572-459b-9018-141b9c7f1da9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.770754] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116737, 'name': Rename_Task, 'duration_secs': 0.198795} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.772211] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 891.772478] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 891.772478] env[62109]: value = "task-1116740" [ 891.772478] env[62109]: _type = "Task" [ 891.772478] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.772645] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-645276e1-4ca6-4cfe-b434-3d33bc01c6fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.783772] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116740, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.785140] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Waiting for the task: (returnval){ [ 891.785140] env[62109]: value = "task-1116741" [ 891.785140] env[62109]: _type = "Task" [ 891.785140] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.794778] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116741, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.038770] env[62109]: DEBUG nova.compute.utils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 892.040636] env[62109]: DEBUG nova.compute.manager [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 892.040906] env[62109]: DEBUG nova.network.neutron [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 892.085298] env[62109]: DEBUG oslo_vmware.api [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116739, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190819} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.085605] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.085818] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 892.086083] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 892.086674] env[62109]: INFO nova.compute.manager [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Took 1.10 seconds to destroy the instance on the hypervisor. [ 892.086860] env[62109]: DEBUG oslo.service.loopingcall [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.087062] env[62109]: DEBUG nova.compute.manager [-] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 892.087172] env[62109]: DEBUG nova.network.neutron [-] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 892.109525] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 892.111433] env[62109]: DEBUG nova.network.neutron [-] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 892.115519] env[62109]: DEBUG nova.policy [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94f0db4664ce465b8e71928d55284d0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cddb4c7a9ba442d98d6cf4f3ab30ad71', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 892.152525] env[62109]: INFO nova.compute.manager [-] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Took 1.54 seconds to deallocate network for instance. [ 892.261343] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f985e937-9d65-46cf-bd90-9e52dbcec0a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.274184] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7279a0-daa7-4199-9b3e-7bc908a2be4d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.319782] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116740, 'name': CreateVM_Task, 'duration_secs': 0.424339} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.322673] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 892.323060] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116741, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.326370] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.326593] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.326926] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 892.336745] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5626cfd5-5505-422a-b29d-35c6c3b267af {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.339101] env[62109]: DEBUG nova.compute.manager [req-5feed897-9fa7-4d96-9a68-e6743d086e2b req-2b3a555b-85f0-4349-93c2-5ac0da449b4c service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Detach interface failed, port_id=79c26dde-062f-4334-8ba6-683c21a284d8, reason: Instance 3e641c90-2358-4a1c-9af5-6ad96f722aba could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 892.347017] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 892.347017] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52013a33-97a4-a01c-93c2-a6802823264e" [ 892.347017] env[62109]: _type = "Task" [ 892.347017] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.355186] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52013a33-97a4-a01c-93c2-a6802823264e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.543210] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963dbc5f-35bf-43eb-91b2-ceb96dd8eee9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.547516] env[62109]: DEBUG nova.compute.manager [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 892.555444] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24aa856a-b9f8-4a08-89ca-f5e5933ad1ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.592534] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8761664a-55fe-414d-b511-87bc4c494bb1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.601693] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8157e37c-670f-4f9c-9052-2e77b8fd952a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.618338] env[62109]: DEBUG nova.compute.provider_tree [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.619688] env[62109]: DEBUG nova.network.neutron [-] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.643950] env[62109]: DEBUG nova.network.neutron [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Successfully created port: 8398d68d-f5f3-4bd3-8e76-aa3d0916ece2 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 892.656137] env[62109]: DEBUG nova.network.neutron [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 service nova] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Updated VIF entry in instance network info cache for port 00cdfb69-948b-48cf-9c2a-107bb3d177c0. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 892.657647] env[62109]: DEBUG nova.network.neutron [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 service nova] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Updating instance_info_cache with network_info: [{"id": "00cdfb69-948b-48cf-9c2a-107bb3d177c0", "address": "fa:16:3e:ff:c4:52", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00cdfb69-94", "ovs_interfaceid": "00cdfb69-948b-48cf-9c2a-107bb3d177c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.664716] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.723299] env[62109]: DEBUG nova.network.neutron [-] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.726958] env[62109]: DEBUG nova.compute.manager [req-8aaafffa-bca4-4575-9178-b2a51619778b req-99d8d9ce-b619-4297-81de-bba8039a411a service nova] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Received event network-vif-deleted-de4056dc-a527-43f0-ad81-f82e5cb00f86 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 892.763098] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.809029] env[62109]: DEBUG oslo_vmware.api [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116741, 'name': PowerOnVM_Task, 'duration_secs': 0.553716} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.809428] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 892.809650] env[62109]: INFO nova.compute.manager [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Took 9.90 seconds to spawn the instance on the hypervisor. [ 892.809836] env[62109]: DEBUG nova.compute.manager [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 892.810663] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e66604d-a35d-4e45-ba91-b7393929ade6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.859063] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52013a33-97a4-a01c-93c2-a6802823264e, 'name': SearchDatastore_Task, 'duration_secs': 0.044554} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.859063] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.859161] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.859788] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.859788] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.859907] env[62109]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.860296] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9596620a-1fdd-4778-b215-d80280c446a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.871534] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.871744] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 892.872766] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-524bc23d-f280-432a-a391-1421d3379f60 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.878850] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 892.878850] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52934c20-caa5-b8a7-ec8a-569e2169f643" [ 892.878850] env[62109]: _type = "Task" [ 892.878850] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.890019] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52934c20-caa5-b8a7-ec8a-569e2169f643, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.122017] env[62109]: DEBUG nova.scheduler.client.report [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 893.125384] env[62109]: INFO nova.compute.manager [-] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Took 1.04 seconds to deallocate network for instance. 
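The repeated lock bookkeeping entries in this log ("Acquiring lock ... by ...", "... acquired ... :: waited Ns", '... "released" ... :: held Ns', for example the "compute_resources" timings just below) are emitted by oslo.concurrency's lockutils wrapper around the decorated function. A minimal sketch of that pattern, assuming only that oslo.concurrency is installed; the lock name and the worker function are illustrative, not Nova's own code:

import logging
import time

from oslo_concurrency import lockutils

# lockutils logs its acquire/wait/release bookkeeping at DEBUG level.
logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('compute_resources')
def update_usage_example():
    # While this body runs, any other caller synchronized on the same lock
    # name blocks, which is why entries in this log report long waits such
    # as "waited 28.886s" when the resource tracker is busy.
    time.sleep(0.1)


if __name__ == '__main__':
    update_usage_example()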
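The "Waiting for function ..._deallocate_network_with_retries to return" entries earlier in this section come from an oslo.service looping call that re-invokes a deallocation helper until it signals completion. A rough, hypothetical sketch of that retry-until-done shape, using FixedIntervalLoopingCall purely for illustration (Nova's actual helper and parameters may differ, and the deallocate stub below is invented):

from oslo_service import loopingcall

attempts = {'count': 0}


def _deallocate_with_retries():
    # Pretend deallocation that succeeds on the third call; until then the
    # timer simply schedules another call after `interval` seconds.
    attempts['count'] += 1
    if attempts['count'] < 3:
        return
    raise loopingcall.LoopingCallDone(retvalue=True)


timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone is raised
print('deallocated:', result)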
[ 893.159479] env[62109]: DEBUG oslo_concurrency.lockutils [req-35ca0267-08e7-4e31-bcf1-beacb9443cc3 req-fe286247-6bbf-454e-8b75-6880a7aef4b5 service nova] Releasing lock "refresh_cache-448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.228654] env[62109]: INFO nova.compute.manager [-] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Took 2.17 seconds to deallocate network for instance. [ 893.266130] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Releasing lock "refresh_cache-c753a2db-d701-4508-88bd-4ebe4f32a075" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.270504] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Updated the network info_cache for instance {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 893.270504] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.270504] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.270504] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.270504] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.270504] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.270504] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.270504] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 893.270504] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.332837] env[62109]: INFO nova.compute.manager [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Took 43.20 seconds to build instance. [ 893.346469] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "5842e112-d3ef-4ce9-91cc-198e68d12422" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.346750] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.390247] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52934c20-caa5-b8a7-ec8a-569e2169f643, 'name': SearchDatastore_Task, 'duration_secs': 0.024167} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.391079] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d517ded9-f237-4a5a-8612-32b5622822bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.397600] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 893.397600] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5215a236-706f-5f70-c462-1cd6e638645b" [ 893.397600] env[62109]: _type = "Task" [ 893.397600] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.409943] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5215a236-706f-5f70-c462-1cd6e638645b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.557894] env[62109]: DEBUG nova.compute.manager [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 893.588659] env[62109]: DEBUG nova.virt.hardware [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 893.588924] env[62109]: DEBUG nova.virt.hardware [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 893.589112] env[62109]: DEBUG nova.virt.hardware [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 893.589308] env[62109]: DEBUG nova.virt.hardware [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 893.589460] env[62109]: DEBUG nova.virt.hardware [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 893.589612] env[62109]: DEBUG nova.virt.hardware [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 893.589823] env[62109]: DEBUG nova.virt.hardware [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 893.589989] env[62109]: DEBUG nova.virt.hardware [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 893.590196] env[62109]: DEBUG nova.virt.hardware [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 893.590378] env[62109]: DEBUG nova.virt.hardware [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 893.590556] env[62109]: DEBUG nova.virt.hardware [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 893.591461] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd975c21-2298-40ea-b556-c25408eb3be6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.599912] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed70f70-8003-4a28-8685-eb17c44faa20 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.626920] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.092s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.629533] env[62109]: DEBUG oslo_concurrency.lockutils [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.886s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.629888] env[62109]: DEBUG nova.objects.instance [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lazy-loading 'resources' on Instance uuid d7d1029c-9b7c-4bd7-b606-a1962a129461 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.632694] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
893.650313] env[62109]: INFO nova.scheduler.client.report [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Deleted allocations for instance b1321874-8f97-4444-9f9c-d586d51a9e92 [ 893.734206] env[62109]: DEBUG oslo_concurrency.lockutils [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.773682] env[62109]: DEBUG nova.compute.manager [req-dedd4838-dd69-4484-bb65-fb7a448a1746 req-77b090d8-80e6-43b9-aa77-81c70b4a6f94 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Received event network-vif-deleted-89be5c13-2cda-47b1-a962-1d53a02d7a5e {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 893.773905] env[62109]: DEBUG nova.compute.manager [req-dedd4838-dd69-4484-bb65-fb7a448a1746 req-77b090d8-80e6-43b9-aa77-81c70b4a6f94 service nova] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Received event network-vif-deleted-a15cf7b7-f572-459b-9018-141b9c7f1da9 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 893.774855] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.834170] env[62109]: DEBUG oslo_concurrency.lockutils [None req-541dd0c7-712c-4bc5-8088-bb8a64a0a606 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Lock "6f31405e-a766-46da-8bf9-7be37a323bf3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.707s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.851231] env[62109]: DEBUG nova.compute.manager [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 893.913936] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5215a236-706f-5f70-c462-1cd6e638645b, 'name': SearchDatastore_Task, 'duration_secs': 0.01305} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.914258] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.914523] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d/448371eb-c1dd-4d7b-b946-aaf6c3a3a36d.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 893.914787] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d394a7a1-e430-4b5f-ba41-25f0cc852384 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.922037] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 893.922037] env[62109]: value = "task-1116742" [ 893.922037] env[62109]: _type = "Task" [ 893.922037] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.930710] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116742, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.157788] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d784137e-2096-4e0a-81ea-a54dfcdd7cb3 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "b1321874-8f97-4444-9f9c-d586d51a9e92" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.390s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.233133] env[62109]: DEBUG nova.network.neutron [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Successfully updated port: 8398d68d-f5f3-4bd3-8e76-aa3d0916ece2 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 894.374750] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.435503] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116742, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.561572] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8487dbd1-320c-4bee-9b97-ffda770ddc7b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.570604] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147fbbd9-c40a-4349-8dfe-c9840ac46d24 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.607262] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad85727-c4b3-49b9-9c91-4529f339cdb4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.624060] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e600f4a2-ebe2-471f-b313-730e79a63d1a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.644574] env[62109]: DEBUG nova.compute.provider_tree [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.735833] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.735833] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquired lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.735833] env[62109]: DEBUG nova.network.neutron [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 894.903148] env[62109]: DEBUG nova.compute.manager [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Received event network-vif-plugged-8398d68d-f5f3-4bd3-8e76-aa3d0916ece2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 894.903507] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] Acquiring lock "5c7dbe04-5027-49cd-a478-79046fee1f16-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.903588] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] Lock "5c7dbe04-5027-49cd-a478-79046fee1f16-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.903974] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] Lock "5c7dbe04-5027-49cd-a478-79046fee1f16-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.903974] env[62109]: DEBUG nova.compute.manager [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] No waiting events found dispatching network-vif-plugged-8398d68d-f5f3-4bd3-8e76-aa3d0916ece2 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 894.904165] env[62109]: WARNING nova.compute.manager [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Received unexpected event network-vif-plugged-8398d68d-f5f3-4bd3-8e76-aa3d0916ece2 for instance with vm_state building and task_state spawning. 
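The entries just above show the external-event handshake for port 8398d68d-f5f3-4bd3-8e76-aa3d0916ece2: Neutron reports network-vif-plugged, the compute manager looks for a registered waiter, finds none ("No waiting events found dispatching ..."), and logs the WARNING about an unexpected event because the instance is still building/spawning and has not started waiting yet. The sketch below is illustrative only (not Nova's code; all names are made up) and shows the general register-then-dispatch pattern that produces exactly that warning when the event arrives before anyone is waiting.

# Illustrative sketch only -- not Nova's implementation.
import threading

class ExternalEventRegistry:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}          # event name -> threading.Event

    def prepare(self, name):
        """Register interest in an event before starting the operation."""
        ev = threading.Event()
        with self._lock:
            self._waiters[name] = ev
        return ev

    def dispatch(self, name):
        """Called when the external service reports the event."""
        with self._lock:
            ev = self._waiters.pop(name, None)
        if ev is None:
            # The case seen in the log: the event beat the waiter, so the
            # dispatcher can only warn about an unexpected event.
            print("WARNING: received unexpected event %s" % name)
            return False
        ev.set()
        return True

# Usage: register first, trigger the external action, then wait.
registry = ExternalEventRegistry()
waiter = registry.prepare("network-vif-plugged-8398d68d")
registry.dispatch("network-vif-plugged-8398d68d")   # wakes the waiter
waiter.wait(timeout=300)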
[ 894.904358] env[62109]: DEBUG nova.compute.manager [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Received event network-changed-8398d68d-f5f3-4bd3-8e76-aa3d0916ece2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 894.904417] env[62109]: DEBUG nova.compute.manager [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Refreshing instance network info cache due to event network-changed-8398d68d-f5f3-4bd3-8e76-aa3d0916ece2. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 894.904574] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] Acquiring lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.935668] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116742, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.902908} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.935955] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d/448371eb-c1dd-4d7b-b946-aaf6c3a3a36d.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 894.936277] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 894.936849] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c86d5280-356c-4075-9105-dd57124b4d5a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.945041] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 894.945041] env[62109]: value = "task-1116743" [ 894.945041] env[62109]: _type = "Task" [ 894.945041] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.952634] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116743, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.124197] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Acquiring lock "6f31405e-a766-46da-8bf9-7be37a323bf3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.124263] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Lock "6f31405e-a766-46da-8bf9-7be37a323bf3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.124463] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Acquiring lock "6f31405e-a766-46da-8bf9-7be37a323bf3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.124652] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Lock "6f31405e-a766-46da-8bf9-7be37a323bf3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.124825] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Lock "6f31405e-a766-46da-8bf9-7be37a323bf3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.128429] env[62109]: INFO nova.compute.manager [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Terminating instance [ 895.130405] env[62109]: DEBUG nova.compute.manager [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 895.130618] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 895.132028] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa670295-84a9-48d2-b166-e396a93aa843 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.140272] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 895.140557] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0bddb0be-e1a3-48be-9dcd-f8b78300b154 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.148183] env[62109]: DEBUG nova.scheduler.client.report [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 895.151594] env[62109]: DEBUG oslo_vmware.api [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Waiting for the task: (returnval){ [ 895.151594] env[62109]: value = "task-1116744" [ 895.151594] env[62109]: _type = "Task" [ 895.151594] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.160386] env[62109]: DEBUG oslo_vmware.api [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116744, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.270177] env[62109]: DEBUG nova.network.neutron [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 895.416150] env[62109]: DEBUG nova.network.neutron [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Updating instance_info_cache with network_info: [{"id": "8398d68d-f5f3-4bd3-8e76-aa3d0916ece2", "address": "fa:16:3e:32:53:da", "network": {"id": "f5e70352-43f1-423d-8e31-44ae247ddba2", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-426993836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cddb4c7a9ba442d98d6cf4f3ab30ad71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8398d68d-f5", "ovs_interfaceid": "8398d68d-f5f3-4bd3-8e76-aa3d0916ece2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.455534] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116743, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069071} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.455800] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 895.456608] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4befd7-eabe-4e3c-abab-51db7f08bc20 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.481752] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d/448371eb-c1dd-4d7b-b946-aaf6c3a3a36d.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.482203] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8127e11-15f2-4ef7-80bf-59187c601de4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.501793] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 895.501793] env[62109]: value = "task-1116745" [ 895.501793] env[62109]: _type = "Task" [ 895.501793] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.509660] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116745, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.656872] env[62109]: DEBUG oslo_concurrency.lockutils [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.027s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.659768] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.237s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.659768] env[62109]: DEBUG nova.objects.instance [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lazy-loading 'resources' on Instance uuid 0f197e98-9630-4928-8707-56bbf6c1e5a1 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 895.666603] env[62109]: DEBUG oslo_vmware.api [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116744, 'name': PowerOffVM_Task, 'duration_secs': 0.197008} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.667462] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 895.668165] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 895.668165] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ed6a31c-d3bc-4826-9399-a10c51786df2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.684213] env[62109]: INFO nova.scheduler.client.report [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Deleted allocations for instance d7d1029c-9b7c-4bd7-b606-a1962a129461 [ 895.744731] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 895.744966] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 
6f31405e-a766-46da-8bf9-7be37a323bf3] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 895.745192] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Deleting the datastore file [datastore1] 6f31405e-a766-46da-8bf9-7be37a323bf3 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.745509] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ea4f473-8822-448e-9f68-8b55d26fc95b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.753971] env[62109]: DEBUG oslo_vmware.api [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Waiting for the task: (returnval){ [ 895.753971] env[62109]: value = "task-1116747" [ 895.753971] env[62109]: _type = "Task" [ 895.753971] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.764429] env[62109]: DEBUG oslo_vmware.api [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116747, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.918864] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Releasing lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.919264] env[62109]: DEBUG nova.compute.manager [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Instance network_info: |[{"id": "8398d68d-f5f3-4bd3-8e76-aa3d0916ece2", "address": "fa:16:3e:32:53:da", "network": {"id": "f5e70352-43f1-423d-8e31-44ae247ddba2", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-426993836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cddb4c7a9ba442d98d6cf4f3ab30ad71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8398d68d-f5", "ovs_interfaceid": "8398d68d-f5f3-4bd3-8e76-aa3d0916ece2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 895.919592] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] Acquired lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.919780] env[62109]: DEBUG nova.network.neutron [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Refreshing network info cache for port 8398d68d-f5f3-4bd3-8e76-aa3d0916ece2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 895.921076] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:53:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0dd3c126-9d86-4f9a-b81c-e9627c7a5401', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8398d68d-f5f3-4bd3-8e76-aa3d0916ece2', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 895.929868] env[62109]: DEBUG oslo.service.loopingcall [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.932159] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 895.932402] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd040a4d-9629-4fa1-8061-562116a7bdd7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.954653] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 895.954653] env[62109]: value = "task-1116748" [ 895.954653] env[62109]: _type = "Task" [ 895.954653] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.963043] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116748, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.013119] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116745, 'name': ReconfigVM_Task, 'duration_secs': 0.318355} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.013119] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d/448371eb-c1dd-4d7b-b946-aaf6c3a3a36d.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 896.013819] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ca1ff6d-20eb-4d38-886c-c5b0e2b1a9b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.023267] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 896.023267] env[62109]: value = "task-1116749" [ 896.023267] env[62109]: _type = "Task" [ 896.023267] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.037971] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116749, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.192738] env[62109]: DEBUG oslo_concurrency.lockutils [None req-07b9e091-892d-4850-bdb5-c260341c7822 tempest-MultipleCreateTestJSON-2004588672 tempest-MultipleCreateTestJSON-2004588672-project-member] Lock "d7d1029c-9b7c-4bd7-b606-a1962a129461" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.499s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.265028] env[62109]: DEBUG oslo_vmware.api [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Task: {'id': task-1116747, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164634} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.267980] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 896.269089] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 896.269089] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 896.269089] env[62109]: INFO nova.compute.manager [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Took 1.14 seconds to destroy the instance on the hypervisor. [ 896.269089] env[62109]: DEBUG oslo.service.loopingcall [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.269623] env[62109]: DEBUG nova.compute.manager [-] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 896.269818] env[62109]: DEBUG nova.network.neutron [-] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 896.469777] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116748, 'name': CreateVM_Task, 'duration_secs': 0.414947} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.469899] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 896.470671] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.470849] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.471202] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 896.471459] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f317c412-df6f-482b-ad12-ad4545b16273 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.478499] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 896.478499] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5241935b-74c3-1432-91cc-0586862e79e3" [ 896.478499] env[62109]: _type = "Task" [ 896.478499] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.486879] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5241935b-74c3-1432-91cc-0586862e79e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.533744] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116749, 'name': Rename_Task, 'duration_secs': 0.159014} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.533744] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 896.534820] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e13427bb-c1ca-4de0-84bf-c0baefcb8f99 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.543766] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 896.543766] env[62109]: value = "task-1116750" [ 896.543766] env[62109]: _type = "Task" [ 896.543766] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.555193] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116750, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.598464] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a58c92c-266a-4077-a804-4bfbf66743c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.606418] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5b7a96-e541-4932-a014-1b3aa2bcf78e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.643645] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a11964-10ee-4729-80a0-044be59e9f95 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.652421] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4563063b-f0d5-4bee-841f-cf1376ed167a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.667924] env[62109]: DEBUG nova.compute.provider_tree [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 896.884260] env[62109]: DEBUG nova.network.neutron [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] [instance: 
5c7dbe04-5027-49cd-a478-79046fee1f16] Updated VIF entry in instance network info cache for port 8398d68d-f5f3-4bd3-8e76-aa3d0916ece2. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 896.884639] env[62109]: DEBUG nova.network.neutron [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Updating instance_info_cache with network_info: [{"id": "8398d68d-f5f3-4bd3-8e76-aa3d0916ece2", "address": "fa:16:3e:32:53:da", "network": {"id": "f5e70352-43f1-423d-8e31-44ae247ddba2", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-426993836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cddb4c7a9ba442d98d6cf4f3ab30ad71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8398d68d-f5", "ovs_interfaceid": "8398d68d-f5f3-4bd3-8e76-aa3d0916ece2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.992201] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5241935b-74c3-1432-91cc-0586862e79e3, 'name': SearchDatastore_Task, 'duration_secs': 0.010396} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.992771] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.993068] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 896.993363] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.993554] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.993780] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 896.994123] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6e6e454-9416-47ea-a1f5-ed446a2fc5e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.005668] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 897.005957] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 897.006911] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daa39604-e5eb-436b-b43f-8da81505b4d4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.013682] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 897.013682] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5294bc9b-e049-84e7-3116-53d8c553e8cc" [ 897.013682] env[62109]: _type = "Task" [ 897.013682] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.025737] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5294bc9b-e049-84e7-3116-53d8c553e8cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.027312] env[62109]: DEBUG nova.compute.manager [req-8f0a4e54-7ca1-4dde-9e30-558664751ae1 req-e39a5263-04b4-4f0a-8c68-7d5f012ef90d service nova] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Received event network-vif-deleted-a3f71607-a9b8-4285-bd06-1b908b502906 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 897.027902] env[62109]: INFO nova.compute.manager [req-8f0a4e54-7ca1-4dde-9e30-558664751ae1 req-e39a5263-04b4-4f0a-8c68-7d5f012ef90d service nova] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Neutron deleted interface a3f71607-a9b8-4285-bd06-1b908b502906; detaching it from the instance and deleting it from the info cache [ 897.028251] env[62109]: DEBUG nova.network.neutron [req-8f0a4e54-7ca1-4dde-9e30-558664751ae1 req-e39a5263-04b4-4f0a-8c68-7d5f012ef90d service nova] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.055991] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116750, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.189196] env[62109]: ERROR nova.scheduler.client.report [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [req-64dbaf1f-ba34-4bb7-a663-0fd8f9cecd87] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-64dbaf1f-ba34-4bb7-a663-0fd8f9cecd87"}]} [ 897.208743] env[62109]: DEBUG nova.scheduler.client.report [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 897.226092] env[62109]: DEBUG nova.scheduler.client.report [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 897.226389] env[62109]: DEBUG nova.compute.provider_tree [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 897.238401] env[62109]: DEBUG nova.scheduler.client.report [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 897.257226] env[62109]: DEBUG nova.scheduler.client.report [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 897.346251] env[62109]: DEBUG nova.network.neutron [-] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.387570] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d8985c2-5724-4f29-a14d-6e7cb647c8c0 req-7b4bd7da-4dde-4bcb-99a7-c94a8b74f545 service nova] Releasing lock 
"refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.529776] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5294bc9b-e049-84e7-3116-53d8c553e8cc, 'name': SearchDatastore_Task, 'duration_secs': 0.014626} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.531097] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf206f11-e92c-4627-9a59-12b38aacb43e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.536024] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-50353186-3a1a-4323-b6aa-fc0e3a9da643 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.539414] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 897.539414] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52562e31-b9a5-369d-54fd-d49ea27d19f8" [ 897.539414] env[62109]: _type = "Task" [ 897.539414] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.551219] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70fcb38-2191-46f2-afff-22e17e6e08e0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.570026] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52562e31-b9a5-369d-54fd-d49ea27d19f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.578483] env[62109]: DEBUG oslo_vmware.api [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116750, 'name': PowerOnVM_Task, 'duration_secs': 0.529678} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.578771] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 897.578970] env[62109]: INFO nova.compute.manager [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Took 8.71 seconds to spawn the instance on the hypervisor. 
[ 897.579286] env[62109]: DEBUG nova.compute.manager [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 897.580812] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9590692-8596-4b71-8044-ee05f53ab9be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.593731] env[62109]: DEBUG nova.compute.manager [req-8f0a4e54-7ca1-4dde-9e30-558664751ae1 req-e39a5263-04b4-4f0a-8c68-7d5f012ef90d service nova] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Detach interface failed, port_id=a3f71607-a9b8-4285-bd06-1b908b502906, reason: Instance 6f31405e-a766-46da-8bf9-7be37a323bf3 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 897.763325] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180d9f00-9bff-4cbe-9425-d5a09dea85c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.771852] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca56e098-9832-4462-901f-3c462b1d65db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.805250] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5bdbfe-a588-401b-8fbc-c6aef9753541 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.813949] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2def055-cb33-4d31-818e-9a1551566fed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.833812] env[62109]: DEBUG nova.compute.provider_tree [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 897.850098] env[62109]: INFO nova.compute.manager [-] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Took 1.58 seconds to deallocate network for instance. [ 898.050965] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52562e31-b9a5-369d-54fd-d49ea27d19f8, 'name': SearchDatastore_Task, 'duration_secs': 0.009732} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.051265] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.051548] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 5c7dbe04-5027-49cd-a478-79046fee1f16/5c7dbe04-5027-49cd-a478-79046fee1f16.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 898.051812] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b871512-6c4d-4df7-b814-45ccc03f5ff8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.059929] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 898.059929] env[62109]: value = "task-1116751" [ 898.059929] env[62109]: _type = "Task" [ 898.059929] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.068634] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116751, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.114285] env[62109]: INFO nova.compute.manager [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Took 40.04 seconds to build instance. 
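The 409 at 897.189 ("placement.concurrent_update") and the refresh that follows (provider generation bumped from 102 to 103 at 898.375) illustrate Placement's optimistic-concurrency scheme: every inventory PUT must carry the resource provider generation the writer last saw, and a stale generation is rejected so the client re-reads the provider and retries. The following is a standalone sketch of that retry loop against the Placement REST API, assuming a plain requests call with a valid token; the URL, token, microversion header and retry count are illustrative, and this is not Nova's actual scheduler report-client code.

# Illustrative retry loop for the placement.concurrent_update 409 seen above;
# endpoint URL, token and retry policy are assumptions for the example.
import requests

PLACEMENT_URL = "http://placement.example/resource_providers"
HEADERS = {"X-Auth-Token": "TOKEN",
           "OpenStack-API-Version": "placement 1.26"}


def set_inventory(rp_uuid, inventories, max_retries=3):
    for _ in range(max_retries):
        # Refresh the provider generation before each attempt.
        rp = requests.get(f"{PLACEMENT_URL}/{rp_uuid}",
                          headers=HEADERS).json()
        body = {"resource_provider_generation": rp["generation"],
                "inventories": inventories}
        resp = requests.put(f"{PLACEMENT_URL}/{rp_uuid}/inventories",
                            headers=HEADERS, json=body)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation; loop to re-read it and retry, as the report client
        # does in the log above.
    raise RuntimeError("placement generation conflict persisted after retries")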
[ 898.358610] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.374814] env[62109]: DEBUG nova.scheduler.client.report [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 102 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 898.375143] env[62109]: DEBUG nova.compute.provider_tree [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 102 to 103 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 898.375347] env[62109]: DEBUG nova.compute.provider_tree [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 898.570520] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116751, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.575177] env[62109]: DEBUG oslo_concurrency.lockutils [None req-87e04058-ca92-468e-8199-9932b169a637 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.616501] env[62109]: DEBUG oslo_concurrency.lockutils [None req-05ca6860-bc81-4bc8-b4fe-69a1d4ba05e7 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.551s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.616501] env[62109]: DEBUG oslo_concurrency.lockutils [None req-87e04058-ca92-468e-8199-9932b169a637 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.041s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.616501] env[62109]: DEBUG nova.compute.manager [None req-87e04058-ca92-468e-8199-9932b169a637 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 898.616829] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e64f274-e951-461b-8785-9b77dd85848d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.624418] env[62109]: DEBUG nova.compute.manager [None req-87e04058-ca92-468e-8199-9932b169a637 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62109) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 898.625030] env[62109]: DEBUG nova.objects.instance [None req-87e04058-ca92-468e-8199-9932b169a637 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lazy-loading 'flavor' on Instance uuid 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 898.881596] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.222s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.883565] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.752s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.885449] env[62109]: INFO nova.compute.claims [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 898.908966] env[62109]: INFO nova.scheduler.client.report [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Deleted allocations for instance 0f197e98-9630-4928-8707-56bbf6c1e5a1 [ 899.072543] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116751, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555143} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.072825] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 5c7dbe04-5027-49cd-a478-79046fee1f16/5c7dbe04-5027-49cd-a478-79046fee1f16.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 899.073058] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 899.073321] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4eb1faa0-ea06-436b-8ac0-04db98516254 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.080999] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 899.080999] env[62109]: value = "task-1116752" [ 899.080999] env[62109]: _type = "Task" [ 899.080999] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.091603] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116752, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.130871] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-87e04058-ca92-468e-8199-9932b169a637 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 899.133404] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cbff2e87-0af0-473a-ac8e-7515aaf664fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.141019] env[62109]: DEBUG oslo_vmware.api [None req-87e04058-ca92-468e-8199-9932b169a637 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 899.141019] env[62109]: value = "task-1116753" [ 899.141019] env[62109]: _type = "Task" [ 899.141019] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.150972] env[62109]: DEBUG oslo_vmware.api [None req-87e04058-ca92-468e-8199-9932b169a637 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116753, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.416100] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3a3e13-b082-44eb-996c-2210df76abdf tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "0f197e98-9630-4928-8707-56bbf6c1e5a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.605s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.591440] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116752, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073292} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.591757] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 899.592526] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb3a899-32d8-4ed6-8f3b-f64b6ee7cd9b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.617045] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 5c7dbe04-5027-49cd-a478-79046fee1f16/5c7dbe04-5027-49cd-a478-79046fee1f16.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 899.617045] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f94bc64-2345-4d16-be3c-646bc92bf16d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.637143] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 899.637143] env[62109]: value = "task-1116754" [ 899.637143] env[62109]: _type = "Task" [ 899.637143] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.652950] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116754, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.653282] env[62109]: DEBUG oslo_vmware.api [None req-87e04058-ca92-468e-8199-9932b169a637 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116753, 'name': PowerOffVM_Task, 'duration_secs': 0.352331} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.653541] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-87e04058-ca92-468e-8199-9932b169a637 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 899.653718] env[62109]: DEBUG nova.compute.manager [None req-87e04058-ca92-468e-8199-9932b169a637 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 899.654475] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb2c54e-95fb-4999-9a24-afa6e8644042 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.149911] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116754, 'name': ReconfigVM_Task, 'duration_secs': 0.288471} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.150261] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 5c7dbe04-5027-49cd-a478-79046fee1f16/5c7dbe04-5027-49cd-a478-79046fee1f16.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 900.150944] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46991f58-57a4-433a-bb48-2ebcce6b15c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.161592] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 900.161592] env[62109]: value = "task-1116756" [ 900.161592] env[62109]: _type = "Task" [ 900.161592] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.169913] env[62109]: DEBUG oslo_concurrency.lockutils [None req-87e04058-ca92-468e-8199-9932b169a637 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.554s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.175165] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116756, 'name': Rename_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.228561] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f625c6-f9d7-432e-a484-c17603a0b738 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.236760] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01477e29-1b92-46e5-8d2d-73815862eaa6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.270391] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9916a0b-dc38-4bdc-9b5f-e3d4a10331cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.279525] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f006e4d1-4af2-4f9f-a0dc-1cca75c04280 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.293518] env[62109]: DEBUG nova.compute.provider_tree [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.673402] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116756, 'name': Rename_Task, 'duration_secs': 0.155463} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.673689] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 900.673937] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9577e90-f9cc-43c8-a94c-184b757f390c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.683207] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 900.683207] env[62109]: value = "task-1116757" [ 900.683207] env[62109]: _type = "Task" [ 900.683207] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.691901] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116757, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.796470] env[62109]: DEBUG nova.scheduler.client.report [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 901.194194] env[62109]: DEBUG oslo_vmware.api [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116757, 'name': PowerOnVM_Task, 'duration_secs': 0.462139} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.194613] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 901.194786] env[62109]: INFO nova.compute.manager [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Took 7.64 seconds to spawn the instance on the hypervisor. [ 901.194973] env[62109]: DEBUG nova.compute.manager [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 901.195782] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732bd4af-5789-4527-9fc8-596c3438e1cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.302165] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.418s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.302728] env[62109]: DEBUG nova.compute.manager [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 901.305915] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 18.970s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.379975] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.380322] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.380579] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.380831] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.381065] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.383264] env[62109]: INFO nova.compute.manager [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Terminating instance [ 901.384966] env[62109]: DEBUG nova.compute.manager [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 901.385190] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 901.386148] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d59ac7-7c04-4b55-aaec-9a6c19565770 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.395400] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 901.395597] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b20c439-0c2b-49ef-b471-e716c434b3c8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.464895] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 901.465157] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 901.465350] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleting the datastore file [datastore1] 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 901.465627] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39b28728-038c-4c0b-8ead-c0765b47c23f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.472505] env[62109]: DEBUG oslo_vmware.api [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 901.472505] env[62109]: value = "task-1116759" [ 901.472505] env[62109]: _type = "Task" [ 901.472505] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.482084] env[62109]: DEBUG oslo_vmware.api [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116759, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.713398] env[62109]: INFO nova.compute.manager [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Took 38.36 seconds to build instance. [ 901.807818] env[62109]: DEBUG nova.compute.utils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 901.809771] env[62109]: DEBUG nova.compute.manager [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 901.809952] env[62109]: DEBUG nova.network.neutron [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 901.813874] env[62109]: INFO nova.compute.claims [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 901.861418] env[62109]: DEBUG nova.policy [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '90f539a3e76641829173e2c95a03d554', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5bdfe29bfa924f66b9087b686e0d7c12', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 901.983852] env[62109]: DEBUG oslo_vmware.api [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116759, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129934} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.984053] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 901.984254] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 901.984640] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 901.984843] env[62109]: INFO nova.compute.manager [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Took 0.60 seconds to destroy the instance on the hypervisor. [ 901.985120] env[62109]: DEBUG oslo.service.loopingcall [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.985347] env[62109]: DEBUG nova.compute.manager [-] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 901.985448] env[62109]: DEBUG nova.network.neutron [-] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 902.018020] env[62109]: DEBUG nova.compute.manager [req-1b82b69a-1d60-4c9c-8bd7-3fab05fbb39f req-bf3de4b3-dbd6-4a09-80e8-b5a851153441 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Received event network-changed-8398d68d-f5f3-4bd3-8e76-aa3d0916ece2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 902.018254] env[62109]: DEBUG nova.compute.manager [req-1b82b69a-1d60-4c9c-8bd7-3fab05fbb39f req-bf3de4b3-dbd6-4a09-80e8-b5a851153441 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Refreshing instance network info cache due to event network-changed-8398d68d-f5f3-4bd3-8e76-aa3d0916ece2. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 902.018516] env[62109]: DEBUG oslo_concurrency.lockutils [req-1b82b69a-1d60-4c9c-8bd7-3fab05fbb39f req-bf3de4b3-dbd6-4a09-80e8-b5a851153441 service nova] Acquiring lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.018621] env[62109]: DEBUG oslo_concurrency.lockutils [req-1b82b69a-1d60-4c9c-8bd7-3fab05fbb39f req-bf3de4b3-dbd6-4a09-80e8-b5a851153441 service nova] Acquired lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.018783] env[62109]: DEBUG nova.network.neutron [req-1b82b69a-1d60-4c9c-8bd7-3fab05fbb39f req-bf3de4b3-dbd6-4a09-80e8-b5a851153441 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Refreshing network info cache for port 8398d68d-f5f3-4bd3-8e76-aa3d0916ece2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 902.215778] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e4eb065-fe19-42f7-9d57-efed2126083a tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "5c7dbe04-5027-49cd-a478-79046fee1f16" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.877s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.242071] env[62109]: DEBUG nova.network.neutron [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Successfully created port: aa073427-0e13-40b5-a706-b9b7dfb1818c {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 902.321023] env[62109]: DEBUG nova.compute.manager [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 902.323420] env[62109]: INFO nova.compute.resource_tracker [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating resource usage from migration c4667422-9473-4783-af9c-f6de4a4209a6 [ 902.587572] env[62109]: DEBUG nova.compute.manager [req-812f4328-8815-4137-ba27-14f386737fd9 req-14a29caf-dc11-4fff-a210-4abb0ad49b04 service nova] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Received event network-vif-deleted-00cdfb69-948b-48cf-9c2a-107bb3d177c0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 902.587779] env[62109]: INFO nova.compute.manager [req-812f4328-8815-4137-ba27-14f386737fd9 req-14a29caf-dc11-4fff-a210-4abb0ad49b04 service nova] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Neutron deleted interface 00cdfb69-948b-48cf-9c2a-107bb3d177c0; detaching it from the instance and deleting it from the info cache [ 902.588015] env[62109]: DEBUG nova.network.neutron [req-812f4328-8815-4137-ba27-14f386737fd9 req-14a29caf-dc11-4fff-a210-4abb0ad49b04 service nova] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.657393] env[62109]: DEBUG oslo_concurrency.lockutils [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "028300fd-f9f8-4606-a39e-53582f830eeb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.658104] env[62109]: DEBUG oslo_concurrency.lockutils [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "028300fd-f9f8-4606-a39e-53582f830eeb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.658104] env[62109]: DEBUG oslo_concurrency.lockutils [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "028300fd-f9f8-4606-a39e-53582f830eeb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.658104] env[62109]: DEBUG oslo_concurrency.lockutils [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "028300fd-f9f8-4606-a39e-53582f830eeb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.658290] env[62109]: DEBUG oslo_concurrency.lockutils [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "028300fd-f9f8-4606-a39e-53582f830eeb-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.661239] env[62109]: INFO nova.compute.manager [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Terminating instance [ 902.666806] env[62109]: DEBUG nova.compute.manager [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 902.667028] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 902.667866] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f02ca9-ea36-493e-85af-20638fb38e83 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.677775] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 902.680323] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c9568b7-0d47-4046-9166-4643e9860468 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.687474] env[62109]: DEBUG oslo_vmware.api [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 902.687474] env[62109]: value = "task-1116760" [ 902.687474] env[62109]: _type = "Task" [ 902.687474] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.698625] env[62109]: DEBUG oslo_vmware.api [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116760, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.705740] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feed5af2-b6f0-4af4-86be-d4852efb8d36 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.713455] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249b6454-044c-4cd9-b61c-07c213a9419f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.747399] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c8efc9-4e9a-4baf-9be4-74883301b752 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.755554] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "5c7dbe04-5027-49cd-a478-79046fee1f16" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.755916] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "5c7dbe04-5027-49cd-a478-79046fee1f16" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.756258] env[62109]: INFO nova.compute.manager [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Rebooting instance [ 902.759731] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5023dd1f-69c5-44ee-996f-2a196fd5f3cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.775746] env[62109]: DEBUG nova.compute.provider_tree [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.791938] env[62109]: DEBUG nova.network.neutron [req-1b82b69a-1d60-4c9c-8bd7-3fab05fbb39f req-bf3de4b3-dbd6-4a09-80e8-b5a851153441 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Updated VIF entry in instance network info cache for port 8398d68d-f5f3-4bd3-8e76-aa3d0916ece2. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 902.792325] env[62109]: DEBUG nova.network.neutron [req-1b82b69a-1d60-4c9c-8bd7-3fab05fbb39f req-bf3de4b3-dbd6-4a09-80e8-b5a851153441 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Updating instance_info_cache with network_info: [{"id": "8398d68d-f5f3-4bd3-8e76-aa3d0916ece2", "address": "fa:16:3e:32:53:da", "network": {"id": "f5e70352-43f1-423d-8e31-44ae247ddba2", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-426993836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cddb4c7a9ba442d98d6cf4f3ab30ad71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8398d68d-f5", "ovs_interfaceid": "8398d68d-f5f3-4bd3-8e76-aa3d0916ece2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.886905] env[62109]: DEBUG nova.network.neutron [-] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.094727] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-633eb9ab-01ca-4bf6-be71-51e5bd292bd4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.104912] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d3b02c-64d1-4536-b6ce-c83e64c8f5d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.134301] env[62109]: DEBUG nova.compute.manager [req-812f4328-8815-4137-ba27-14f386737fd9 req-14a29caf-dc11-4fff-a210-4abb0ad49b04 service nova] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Detach interface failed, port_id=00cdfb69-948b-48cf-9c2a-107bb3d177c0, reason: Instance 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 903.197409] env[62109]: DEBUG oslo_vmware.api [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116760, 'name': PowerOffVM_Task, 'duration_secs': 0.244776} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.197765] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 903.197942] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 903.198261] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c222126c-68bc-4c94-9d90-60e90ae7cd59 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.280167] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.281452] env[62109]: DEBUG nova.scheduler.client.report [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 903.292146] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 903.292304] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 903.292492] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Deleting the datastore file [datastore2] 028300fd-f9f8-4606-a39e-53582f830eeb {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.292770] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de5331df-3a8d-4449-aa4b-f5d3da2bc502 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.295076] env[62109]: DEBUG oslo_concurrency.lockutils [req-1b82b69a-1d60-4c9c-8bd7-3fab05fbb39f req-bf3de4b3-dbd6-4a09-80e8-b5a851153441 service nova] Releasing lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.295386] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquired lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.295521] env[62109]: DEBUG nova.network.neutron [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 903.302275] env[62109]: DEBUG oslo_vmware.api [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for the task: (returnval){ [ 903.302275] env[62109]: value = "task-1116762" [ 903.302275] env[62109]: _type = "Task" [ 903.302275] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.311535] env[62109]: DEBUG oslo_vmware.api [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116762, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.332434] env[62109]: DEBUG nova.compute.manager [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 903.360658] env[62109]: DEBUG nova.virt.hardware [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 903.360952] env[62109]: DEBUG nova.virt.hardware [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 903.361133] env[62109]: DEBUG nova.virt.hardware [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 903.361328] env[62109]: DEBUG nova.virt.hardware [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 903.361484] env[62109]: DEBUG nova.virt.hardware [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 903.361637] env[62109]: DEBUG nova.virt.hardware [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 903.361851] env[62109]: DEBUG nova.virt.hardware [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 903.362061] env[62109]: DEBUG nova.virt.hardware [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 903.362261] env[62109]: DEBUG nova.virt.hardware [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 
tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 903.362435] env[62109]: DEBUG nova.virt.hardware [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 903.362614] env[62109]: DEBUG nova.virt.hardware [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 903.363595] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0bd6c66-0074-4e6b-9ba8-304b32884d58 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.372225] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8222abb9-95a4-4034-9247-31325392e553 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.391060] env[62109]: INFO nova.compute.manager [-] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Took 1.40 seconds to deallocate network for instance. [ 903.787471] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.481s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.787702] env[62109]: INFO nova.compute.manager [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Migrating [ 903.794739] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.264s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.795035] env[62109]: DEBUG nova.objects.instance [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lazy-loading 'resources' on Instance uuid c753a2db-d701-4508-88bd-4ebe4f32a075 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 903.819800] env[62109]: DEBUG oslo_vmware.api [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Task: {'id': task-1116762, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132523} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.820093] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 903.820319] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 903.820498] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 903.820644] env[62109]: INFO nova.compute.manager [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Took 1.15 seconds to destroy the instance on the hypervisor. [ 903.820902] env[62109]: DEBUG oslo.service.loopingcall [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 903.821334] env[62109]: DEBUG nova.compute.manager [-] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 903.821435] env[62109]: DEBUG nova.network.neutron [-] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 903.897098] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.952577] env[62109]: DEBUG nova.network.neutron [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Successfully updated port: aa073427-0e13-40b5-a706-b9b7dfb1818c {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 904.085847] env[62109]: DEBUG nova.network.neutron [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Updating instance_info_cache with network_info: [{"id": "8398d68d-f5f3-4bd3-8e76-aa3d0916ece2", "address": "fa:16:3e:32:53:da", "network": {"id": "f5e70352-43f1-423d-8e31-44ae247ddba2", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-426993836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cddb4c7a9ba442d98d6cf4f3ab30ad71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8398d68d-f5", "ovs_interfaceid": "8398d68d-f5f3-4bd3-8e76-aa3d0916ece2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.205635] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec63a7d-a748-4b53-923e-5456bdf4a53e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.215056] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a05a735-833c-4284-a1ee-551b9efc693e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.248463] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-09fd4abb-06af-4a9d-bc07-ba8ae70c8e1e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.257835] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68d4685-e734-4329-99f7-871fdb4aece6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.272941] env[62109]: DEBUG nova.compute.provider_tree [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.309078] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.309417] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.309515] env[62109]: DEBUG nova.network.neutron [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 904.432960] env[62109]: DEBUG nova.compute.manager [req-05639966-6cfe-4db4-86d8-b7b06a288a56 req-d7b7ec8c-1704-4cbb-b03b-be6178f3bfd8 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Received event network-vif-deleted-bb65c0b6-debe-49a6-a623-fc3778c5b9a8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 904.433201] env[62109]: INFO nova.compute.manager [req-05639966-6cfe-4db4-86d8-b7b06a288a56 req-d7b7ec8c-1704-4cbb-b03b-be6178f3bfd8 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Neutron deleted interface bb65c0b6-debe-49a6-a623-fc3778c5b9a8; detaching it from the instance and deleting it from the info cache [ 904.433444] env[62109]: DEBUG nova.network.neutron [req-05639966-6cfe-4db4-86d8-b7b06a288a56 req-d7b7ec8c-1704-4cbb-b03b-be6178f3bfd8 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.457825] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Acquiring lock "refresh_cache-f91f4482-b18d-4883-9f6b-3bc5a386eedd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.457972] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] 
Acquired lock "refresh_cache-f91f4482-b18d-4883-9f6b-3bc5a386eedd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.458226] env[62109]: DEBUG nova.network.neutron [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 904.589410] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Releasing lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.593021] env[62109]: DEBUG nova.compute.manager [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 904.594033] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dceccd05-bdc8-42d4-89f9-dd829a3eeb66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.682577] env[62109]: DEBUG nova.compute.manager [req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Received event network-vif-plugged-aa073427-0e13-40b5-a706-b9b7dfb1818c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 904.682817] env[62109]: DEBUG oslo_concurrency.lockutils [req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] Acquiring lock "f91f4482-b18d-4883-9f6b-3bc5a386eedd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.683046] env[62109]: DEBUG oslo_concurrency.lockutils [req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] Lock "f91f4482-b18d-4883-9f6b-3bc5a386eedd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.683239] env[62109]: DEBUG oslo_concurrency.lockutils [req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] Lock "f91f4482-b18d-4883-9f6b-3bc5a386eedd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.683414] env[62109]: DEBUG nova.compute.manager [req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] No waiting events found dispatching network-vif-plugged-aa073427-0e13-40b5-a706-b9b7dfb1818c {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 904.683780] env[62109]: WARNING nova.compute.manager 
[req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Received unexpected event network-vif-plugged-aa073427-0e13-40b5-a706-b9b7dfb1818c for instance with vm_state building and task_state spawning. [ 904.683995] env[62109]: DEBUG nova.compute.manager [req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Received event network-changed-aa073427-0e13-40b5-a706-b9b7dfb1818c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 904.684519] env[62109]: DEBUG nova.compute.manager [req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Refreshing instance network info cache due to event network-changed-aa073427-0e13-40b5-a706-b9b7dfb1818c. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 904.684755] env[62109]: DEBUG oslo_concurrency.lockutils [req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] Acquiring lock "refresh_cache-f91f4482-b18d-4883-9f6b-3bc5a386eedd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.778018] env[62109]: DEBUG nova.scheduler.client.report [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 904.911986] env[62109]: DEBUG nova.network.neutron [-] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.938267] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-71053c1c-f2a6-432e-983c-f8573a9c32f9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.948390] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c01f401-a9bd-45ed-97d3-27ffbc8b5e70 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.983122] env[62109]: DEBUG nova.compute.manager [req-05639966-6cfe-4db4-86d8-b7b06a288a56 req-d7b7ec8c-1704-4cbb-b03b-be6178f3bfd8 service nova] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Detach interface failed, port_id=bb65c0b6-debe-49a6-a623-fc3778c5b9a8, reason: Instance 028300fd-f9f8-4606-a39e-53582f830eeb could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 905.000543] env[62109]: DEBUG nova.network.neutron [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 905.105334] env[62109]: DEBUG nova.network.neutron [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating instance_info_cache with network_info: [{"id": "70216814-67e6-4c4a-80a6-94f8cf8dd246", "address": "fa:16:3e:fc:3a:44", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70216814-67", "ovs_interfaceid": "70216814-67e6-4c4a-80a6-94f8cf8dd246", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.147581] env[62109]: DEBUG nova.network.neutron [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Updating instance_info_cache with network_info: [{"id": "aa073427-0e13-40b5-a706-b9b7dfb1818c", "address": "fa:16:3e:b3:e0:bb", "network": {"id": "552fc394-d487-4a0b-8621-d02b4ec586a8", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1373817521-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bdfe29bfa924f66b9087b686e0d7c12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97b68ed7-8461-4345-b064-96a1dde53a86", "external-id": "nsx-vlan-transportzone-140", "segmentation_id": 140, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa073427-0e", "ovs_interfaceid": "aa073427-0e13-40b5-a706-b9b7dfb1818c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 905.281046] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.486s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.283413] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.691s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.283644] env[62109]: DEBUG nova.objects.instance [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lazy-loading 'resources' on Instance uuid 342b7069-22fb-4934-9ec3-8ecbc987696e {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 905.303126] env[62109]: INFO nova.scheduler.client.report [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Deleted allocations for instance c753a2db-d701-4508-88bd-4ebe4f32a075 [ 905.415480] env[62109]: INFO nova.compute.manager [-] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Took 1.59 seconds to deallocate network for instance. [ 905.614063] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.616678] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66d918d-aa3c-4eec-9930-c887224b438c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.625419] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Doing hard reboot of VM {{(pid=62109) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1064}} [ 905.626178] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-50b7063d-c570-449c-83fa-0c3c1a0f42a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.633328] env[62109]: DEBUG oslo_vmware.api [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 905.633328] env[62109]: value = "task-1116763" [ 905.633328] env[62109]: _type = "Task" [ 905.633328] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.641955] env[62109]: DEBUG oslo_vmware.api [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116763, 'name': ResetVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.650222] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Releasing lock "refresh_cache-f91f4482-b18d-4883-9f6b-3bc5a386eedd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.650579] env[62109]: DEBUG nova.compute.manager [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Instance network_info: |[{"id": "aa073427-0e13-40b5-a706-b9b7dfb1818c", "address": "fa:16:3e:b3:e0:bb", "network": {"id": "552fc394-d487-4a0b-8621-d02b4ec586a8", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1373817521-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bdfe29bfa924f66b9087b686e0d7c12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97b68ed7-8461-4345-b064-96a1dde53a86", "external-id": "nsx-vlan-transportzone-140", "segmentation_id": 140, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa073427-0e", "ovs_interfaceid": "aa073427-0e13-40b5-a706-b9b7dfb1818c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 905.651110] env[62109]: DEBUG oslo_concurrency.lockutils [req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] Acquired lock "refresh_cache-f91f4482-b18d-4883-9f6b-3bc5a386eedd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.651201] env[62109]: DEBUG nova.network.neutron [req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Refreshing network info cache for port aa073427-0e13-40b5-a706-b9b7dfb1818c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 905.652321] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:e0:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '97b68ed7-8461-4345-b064-96a1dde53a86', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'aa073427-0e13-40b5-a706-b9b7dfb1818c', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 905.659791] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Creating folder: Project (5bdfe29bfa924f66b9087b686e0d7c12). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 905.660801] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c092fadb-c52f-4bf0-bc89-a67b7a110a7b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.673839] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Created folder: Project (5bdfe29bfa924f66b9087b686e0d7c12) in parent group-v244329. [ 905.674056] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Creating folder: Instances. Parent ref: group-v244482. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 905.674310] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e07b1dd-27ba-49c9-936f-042ab6ce82a3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.685131] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Created folder: Instances in parent group-v244482. [ 905.685399] env[62109]: DEBUG oslo.service.loopingcall [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 905.685910] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 905.685910] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-daff81ed-aee7-4fd9-af04-369d0b9d288c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.707121] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.707121] env[62109]: value = "task-1116766" [ 905.707121] env[62109]: _type = "Task" [ 905.707121] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.716096] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116766, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.812692] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3260433-6a91-4550-bbcf-288fddb4c257 tempest-ServerRescueTestJSON-152982729 tempest-ServerRescueTestJSON-152982729-project-member] Lock "c753a2db-d701-4508-88bd-4ebe4f32a075" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.783s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.924269] env[62109]: DEBUG oslo_concurrency.lockutils [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.115640] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404f80e3-ecd1-40f8-8214-77fd9eec8d80 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.130735] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bb2e70-1b79-4810-9bdd-020d06442fd3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.167301] env[62109]: DEBUG oslo_vmware.api [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116763, 'name': ResetVM_Task, 'duration_secs': 0.126362} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.170688] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Did hard reboot of VM {{(pid=62109) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1068}} [ 906.170888] env[62109]: DEBUG nova.compute.manager [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 906.171658] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971e9ec6-8eba-4c6c-a0ce-ec0a960c7b87 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.174550] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab3fec6-0c72-4bb6-a01c-b4fdc3429792 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.186372] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591df834-4025-4d4d-a913-06ad13e07d90 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.204788] env[62109]: DEBUG nova.compute.provider_tree [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.219625] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116766, 'name': CreateVM_Task, 'duration_secs': 0.353527} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.219802] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 906.220522] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.220716] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.221072] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 906.221325] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-189036da-5b91-4c95-8f1b-5cb9b2321b19 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.228374] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Waiting for the task: (returnval){ [ 906.228374] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52bad435-6286-224a-ce77-bcd190ec8b16" [ 906.228374] env[62109]: _type = "Task" [ 906.228374] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.237669] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52bad435-6286-224a-ce77-bcd190ec8b16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.387037] env[62109]: DEBUG nova.network.neutron [req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Updated VIF entry in instance network info cache for port aa073427-0e13-40b5-a706-b9b7dfb1818c. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 906.387156] env[62109]: DEBUG nova.network.neutron [req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Updating instance_info_cache with network_info: [{"id": "aa073427-0e13-40b5-a706-b9b7dfb1818c", "address": "fa:16:3e:b3:e0:bb", "network": {"id": "552fc394-d487-4a0b-8621-d02b4ec586a8", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1373817521-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bdfe29bfa924f66b9087b686e0d7c12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97b68ed7-8461-4345-b064-96a1dde53a86", "external-id": "nsx-vlan-transportzone-140", "segmentation_id": 140, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa073427-0e", "ovs_interfaceid": "aa073427-0e13-40b5-a706-b9b7dfb1818c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.697672] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af3230fb-4d46-4860-8cd4-d74a92abf606 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "5c7dbe04-5027-49cd-a478-79046fee1f16" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.942s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.707776] env[62109]: DEBUG nova.scheduler.client.report [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 906.739847] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52bad435-6286-224a-ce77-bcd190ec8b16, 'name': SearchDatastore_Task, 'duration_secs': 0.011987} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.740423] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.740673] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 906.740919] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.741083] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.741360] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.741527] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2409118-5c99-4e47-945a-46d5e5e73d3c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.750860] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.751053] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 906.751756] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d9d71c7-1d81-4a7d-ad60-81a4473275ef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.757834] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Waiting for the task: (returnval){ [ 906.757834] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528acdcd-39f9-26ef-6587-16ab8c0da04e" [ 906.757834] env[62109]: _type = "Task" [ 906.757834] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.766195] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528acdcd-39f9-26ef-6587-16ab8c0da04e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.890162] env[62109]: DEBUG oslo_concurrency.lockutils [req-61f61691-2001-474f-a107-bf15a9ab668f req-2ba7ef53-3bb2-4a39-81d5-98fe7a891948 service nova] Releasing lock "refresh_cache-f91f4482-b18d-4883-9f6b-3bc5a386eedd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.136278] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f63da65-0eaf-44d7-9cf3-9a6bf4b00a49 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.157444] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating instance '66bbe1e6-e5ee-46a0-b95c-449eef636509' progress to 0 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 907.212140] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.929s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.215988] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.754s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.217522] env[62109]: INFO nova.compute.claims [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.245648] env[62109]: INFO nova.scheduler.client.report [None 
req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Deleted allocations for instance 342b7069-22fb-4934-9ec3-8ecbc987696e [ 907.272260] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528acdcd-39f9-26ef-6587-16ab8c0da04e, 'name': SearchDatastore_Task, 'duration_secs': 0.00946} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.273038] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-559cbcd6-05bf-46c5-9107-9d14494dc8f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.279888] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Waiting for the task: (returnval){ [ 907.279888] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5233f75b-cdc3-0f70-4799-3d2830e8d12f" [ 907.279888] env[62109]: _type = "Task" [ 907.279888] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.290740] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5233f75b-cdc3-0f70-4799-3d2830e8d12f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.663372] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 907.663710] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-032db921-a408-4c11-a452-74d6ba32e766 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.674805] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 907.674805] env[62109]: value = "task-1116767" [ 907.674805] env[62109]: _type = "Task" [ 907.674805] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.686457] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116767, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.754404] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c9d70b52-0a2d-4deb-bba2-db43a9a18cb2 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "342b7069-22fb-4934-9ec3-8ecbc987696e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.861s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.793427] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5233f75b-cdc3-0f70-4799-3d2830e8d12f, 'name': SearchDatastore_Task, 'duration_secs': 0.00993} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.795018] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.795018] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] f91f4482-b18d-4883-9f6b-3bc5a386eedd/f91f4482-b18d-4883-9f6b-3bc5a386eedd.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 907.795018] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91f0f196-835e-48f3-a344-877d560db76e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.804077] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Waiting for the task: (returnval){ [ 907.804077] env[62109]: value = "task-1116768" [ 907.804077] env[62109]: _type = "Task" [ 907.804077] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.816753] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': task-1116768, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.190274] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116767, 'name': PowerOffVM_Task, 'duration_secs': 0.254203} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.190861] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 908.192225] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating instance '66bbe1e6-e5ee-46a0-b95c-449eef636509' progress to 17 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 908.322230] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': task-1116768, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48897} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.322511] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] f91f4482-b18d-4883-9f6b-3bc5a386eedd/f91f4482-b18d-4883-9f6b-3bc5a386eedd.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 908.322732] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 908.323053] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c598aeb9-ccc1-4d29-829f-d9f011607e1a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.334106] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Waiting for the task: (returnval){ [ 908.334106] env[62109]: value = "task-1116769" [ 908.334106] env[62109]: _type = "Task" [ 908.334106] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.344764] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': task-1116769, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.384580] env[62109]: DEBUG nova.compute.manager [req-39a5fd38-205e-41ce-a337-8fcf05596ce3 req-88ed3c46-54f8-4ca6-a223-89bfb14f47ae service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Received event network-changed-8398d68d-f5f3-4bd3-8e76-aa3d0916ece2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 908.384580] env[62109]: DEBUG nova.compute.manager [req-39a5fd38-205e-41ce-a337-8fcf05596ce3 req-88ed3c46-54f8-4ca6-a223-89bfb14f47ae service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Refreshing instance network info cache due to event network-changed-8398d68d-f5f3-4bd3-8e76-aa3d0916ece2. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 908.384580] env[62109]: DEBUG oslo_concurrency.lockutils [req-39a5fd38-205e-41ce-a337-8fcf05596ce3 req-88ed3c46-54f8-4ca6-a223-89bfb14f47ae service nova] Acquiring lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.384580] env[62109]: DEBUG oslo_concurrency.lockutils [req-39a5fd38-205e-41ce-a337-8fcf05596ce3 req-88ed3c46-54f8-4ca6-a223-89bfb14f47ae service nova] Acquired lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.384750] env[62109]: DEBUG nova.network.neutron [req-39a5fd38-205e-41ce-a337-8fcf05596ce3 req-88ed3c46-54f8-4ca6-a223-89bfb14f47ae service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Refreshing network info cache for port 8398d68d-f5f3-4bd3-8e76-aa3d0916ece2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 908.659837] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc3aace-01c4-4617-8740-d5ef7276e693 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.669501] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9896535-8013-4022-8813-f013d0f18c96 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.705477] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 908.705987] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor limits 0:0:0 
{{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 908.706347] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 908.706694] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 908.706985] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 908.707299] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 908.707689] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 908.707908] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 908.708132] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 908.708425] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 908.708887] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 908.716545] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09d8fad9-251e-4f93-8941-5ea6bd76b7f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.733694] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d565adc4-0ca7-4195-9fa6-15577d3426b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.744911] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22d39b3-79a6-4b72-9c45-74531500a818 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.750249] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 908.750249] env[62109]: value = "task-1116770" [ 908.750249] env[62109]: _type = "Task" [ 908.750249] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.761821] env[62109]: DEBUG nova.compute.provider_tree [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.773749] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116770, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.845490] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': task-1116769, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081315} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.845923] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 908.847053] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c505c4d-1094-4944-b47f-3891bf23cd6a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.872956] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] f91f4482-b18d-4883-9f6b-3bc5a386eedd/f91f4482-b18d-4883-9f6b-3bc5a386eedd.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.873781] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b88f1ac9-c2f3-4bab-a117-5bac402cd752 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.897731] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Waiting for the task: (returnval){ [ 908.897731] env[62109]: value = "task-1116771" [ 908.897731] env[62109]: _type = "Task" [ 908.897731] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.083551] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "5c7dbe04-5027-49cd-a478-79046fee1f16" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.083821] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "5c7dbe04-5027-49cd-a478-79046fee1f16" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.084041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "5c7dbe04-5027-49cd-a478-79046fee1f16-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.084225] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "5c7dbe04-5027-49cd-a478-79046fee1f16-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.084391] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "5c7dbe04-5027-49cd-a478-79046fee1f16-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.086980] env[62109]: INFO nova.compute.manager [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Terminating instance [ 909.090030] env[62109]: DEBUG nova.compute.manager [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 909.090030] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 909.090290] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebffdcf-1b4f-4584-aa20-d09a217898b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.098893] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 909.099575] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1591acf1-f2a7-4241-9356-fb6dc3a8388f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.107179] env[62109]: DEBUG oslo_vmware.api [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 909.107179] env[62109]: value = "task-1116772" [ 909.107179] env[62109]: _type = "Task" [ 909.107179] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.117644] env[62109]: DEBUG oslo_vmware.api [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116772, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.169305] env[62109]: DEBUG nova.network.neutron [req-39a5fd38-205e-41ce-a337-8fcf05596ce3 req-88ed3c46-54f8-4ca6-a223-89bfb14f47ae service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Updated VIF entry in instance network info cache for port 8398d68d-f5f3-4bd3-8e76-aa3d0916ece2. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 909.169707] env[62109]: DEBUG nova.network.neutron [req-39a5fd38-205e-41ce-a337-8fcf05596ce3 req-88ed3c46-54f8-4ca6-a223-89bfb14f47ae service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Updating instance_info_cache with network_info: [{"id": "8398d68d-f5f3-4bd3-8e76-aa3d0916ece2", "address": "fa:16:3e:32:53:da", "network": {"id": "f5e70352-43f1-423d-8e31-44ae247ddba2", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-426993836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cddb4c7a9ba442d98d6cf4f3ab30ad71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8398d68d-f5", "ovs_interfaceid": "8398d68d-f5f3-4bd3-8e76-aa3d0916ece2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.261416] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116770, 'name': ReconfigVM_Task, 'duration_secs': 0.326817} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.261932] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating instance '66bbe1e6-e5ee-46a0-b95c-449eef636509' progress to 33 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 909.265869] env[62109]: DEBUG nova.scheduler.client.report [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 909.411508] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': task-1116771, 'name': ReconfigVM_Task, 'duration_secs': 0.310135} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.411836] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Reconfigured VM instance instance-0000004d to attach disk [datastore1] f91f4482-b18d-4883-9f6b-3bc5a386eedd/f91f4482-b18d-4883-9f6b-3bc5a386eedd.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.412511] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6bc0f1ad-e913-44dd-82f6-11fb7a79d0c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.425029] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Waiting for the task: (returnval){ [ 909.425029] env[62109]: value = "task-1116773" [ 909.425029] env[62109]: _type = "Task" [ 909.425029] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.432647] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': task-1116773, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.617800] env[62109]: DEBUG oslo_vmware.api [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116772, 'name': PowerOffVM_Task, 'duration_secs': 0.340897} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.618099] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 909.618297] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 909.618560] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e9ed086-efbb-478e-a55b-c936c352d1f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.676850] env[62109]: DEBUG oslo_concurrency.lockutils [req-39a5fd38-205e-41ce-a337-8fcf05596ce3 req-88ed3c46-54f8-4ca6-a223-89bfb14f47ae service nova] Releasing lock "refresh_cache-5c7dbe04-5027-49cd-a478-79046fee1f16" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.695859] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 909.696414] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 909.696632] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Deleting the datastore file [datastore1] 5c7dbe04-5027-49cd-a478-79046fee1f16 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 909.696920] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9db4408a-2775-467b-b0de-b4af41b35e7b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.705226] env[62109]: DEBUG oslo_vmware.api [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 909.705226] env[62109]: value = "task-1116775" [ 909.705226] env[62109]: _type = "Task" [ 909.705226] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.715312] env[62109]: DEBUG oslo_vmware.api [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116775, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.773024] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 909.773024] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 909.773024] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 909.773024] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 909.773024] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 909.773024] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 909.773024] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 909.773918] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 909.774352] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 
tempest-ServerDiskConfigTestJSON-842321983-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 909.777218] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 909.777218] env[62109]: DEBUG nova.virt.hardware [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 909.781486] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Reconfiguring VM instance instance-00000047 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 909.782403] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.568s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.783146] env[62109]: DEBUG nova.compute.manager [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 909.786369] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29e936da-37d8-4fc2-b1c0-aed2cbdffc5a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.803023] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.584s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.803023] env[62109]: DEBUG nova.objects.instance [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lazy-loading 'resources' on Instance uuid 7afbb35b-9865-40a7-8b37-d6a661a186a9 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 909.813183] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 909.813183] env[62109]: value = "task-1116776" [ 909.813183] env[62109]: _type = "Task" [ 909.813183] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.828148] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116776, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.934175] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': task-1116773, 'name': Rename_Task, 'duration_secs': 0.158691} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.934477] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 909.934770] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da85c664-dd86-4f5b-a693-cdedc5ce119d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.942771] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Waiting for the task: (returnval){ [ 909.942771] env[62109]: value = "task-1116777" [ 909.942771] env[62109]: _type = "Task" [ 909.942771] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.957157] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': task-1116777, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.220021] env[62109]: DEBUG oslo_vmware.api [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116775, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129068} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.220021] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 910.220021] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 910.220021] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 910.220021] env[62109]: INFO nova.compute.manager [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Took 1.13 seconds to destroy the instance on the hypervisor. [ 910.220021] env[62109]: DEBUG oslo.service.loopingcall [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 910.220021] env[62109]: DEBUG nova.compute.manager [-] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 910.220021] env[62109]: DEBUG nova.network.neutron [-] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 910.260399] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquiring lock "e7e232c4-a2cb-44eb-8ee3-11fc12ee152a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.260647] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Lock "e7e232c4-a2cb-44eb-8ee3-11fc12ee152a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.309160] env[62109]: DEBUG nova.compute.utils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 910.311336] env[62109]: DEBUG nova.compute.manager [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 910.311513] env[62109]: DEBUG nova.network.neutron [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 910.325195] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116776, 'name': ReconfigVM_Task, 'duration_secs': 0.177485} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.326114] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Reconfigured VM instance instance-00000047 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 910.327111] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aeef9f7-38e2-4c34-8957-74ae98dce5df {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.352845] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 66bbe1e6-e5ee-46a0-b95c-449eef636509/66bbe1e6-e5ee-46a0-b95c-449eef636509.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 910.357111] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21242995-72bc-425c-8414-33dd984c8c70 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.373743] env[62109]: DEBUG nova.policy [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2988618e18934aa6b85d2ea288917ad3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '275238e3083540aa838de6d5cccf61eb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 910.382518] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 910.382518] env[62109]: value = "task-1116778" [ 910.382518] env[62109]: _type = "Task" [ 910.382518] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.396774] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116778, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.461066] env[62109]: DEBUG oslo_vmware.api [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': task-1116777, 'name': PowerOnVM_Task, 'duration_secs': 0.496406} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.461819] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 910.461819] env[62109]: INFO nova.compute.manager [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Took 7.13 seconds to spawn the instance on the hypervisor. [ 910.462073] env[62109]: DEBUG nova.compute.manager [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 910.463009] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037c1e02-6019-4de7-bfe4-95619a83f1fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.583618] env[62109]: DEBUG nova.compute.manager [req-746993ed-3883-48a5-8b12-d9c44f6ca28a req-657b6aca-25d5-4d01-9603-dffbe511de21 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Received event network-vif-deleted-8398d68d-f5f3-4bd3-8e76-aa3d0916ece2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 910.583826] env[62109]: INFO nova.compute.manager [req-746993ed-3883-48a5-8b12-d9c44f6ca28a req-657b6aca-25d5-4d01-9603-dffbe511de21 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Neutron deleted interface 8398d68d-f5f3-4bd3-8e76-aa3d0916ece2; detaching it from the instance and deleting it from the info cache [ 910.584017] env[62109]: DEBUG nova.network.neutron [req-746993ed-3883-48a5-8b12-d9c44f6ca28a req-657b6aca-25d5-4d01-9603-dffbe511de21 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.762815] env[62109]: DEBUG nova.compute.manager [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 910.777284] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e9326d-ef84-4d2a-8848-a52e8aea1c40 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.788296] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238e7da0-d67f-4138-82f1-f6e7192a4357 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.819782] env[62109]: DEBUG nova.compute.manager [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 910.823823] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bb92cc-6042-48f8-bd64-4c99d0f8b65c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.829552] env[62109]: DEBUG nova.network.neutron [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Successfully created port: a374ac72-8bf4-4d62-9108-eb77cdb6e2d6 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 910.835149] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79be5e81-21fd-4ffa-8856-f6276c93201e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.849844] env[62109]: DEBUG nova.compute.provider_tree [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.892588] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116778, 'name': ReconfigVM_Task, 'duration_secs': 0.305872} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.892879] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 66bbe1e6-e5ee-46a0-b95c-449eef636509/66bbe1e6-e5ee-46a0-b95c-449eef636509.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 910.893172] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating instance '66bbe1e6-e5ee-46a0-b95c-449eef636509' progress to 50 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 910.983938] env[62109]: INFO nova.compute.manager [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Took 30.88 seconds to build instance. [ 911.038662] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.038904] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.061946] env[62109]: DEBUG nova.network.neutron [-] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.089594] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1daa729b-b78f-4ea5-93d3-e46ce311595b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.101018] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342d0ae8-5518-4294-8964-458c2ed1996a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.131724] env[62109]: DEBUG nova.compute.manager [req-746993ed-3883-48a5-8b12-d9c44f6ca28a req-657b6aca-25d5-4d01-9603-dffbe511de21 service nova] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Detach interface failed, port_id=8398d68d-f5f3-4bd3-8e76-aa3d0916ece2, reason: Instance 5c7dbe04-5027-49cd-a478-79046fee1f16 could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 911.290435] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.353358] env[62109]: DEBUG nova.scheduler.client.report [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 911.401139] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bffad9d-886c-4842-ba9f-e5ae6cb5e288 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.423030] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655ec0f4-c817-4550-a7d8-da90fe4cded0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.444610] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating instance '66bbe1e6-e5ee-46a0-b95c-449eef636509' progress to 67 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 911.485747] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ee779fa5-989f-4c97-90bc-ad6aadf6ed73 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Lock "f91f4482-b18d-4883-9f6b-3bc5a386eedd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.390s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.541250] env[62109]: DEBUG nova.compute.manager [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 911.563908] env[62109]: INFO nova.compute.manager [-] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Took 1.34 seconds to deallocate network for instance. [ 911.831121] env[62109]: DEBUG nova.compute.manager [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 911.858451] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.056s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.861206] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 23.857s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.861206] env[62109]: DEBUG nova.objects.instance [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 911.874836] env[62109]: DEBUG nova.virt.hardware [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='ca236167e5e3a0523b36bb9e46063aca',container_format='bare',created_at=2024-10-03T07:57:33Z,direct_url=,disk_format='vmdk',id=ee68a607-5d22-48e9-a6df-4918df414238,min_disk=1,min_ram=0,name='tempest-test-snap-274583717',owner='275238e3083540aa838de6d5cccf61eb',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-10-03T07:57:51Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 911.874836] env[62109]: DEBUG nova.virt.hardware [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 911.874836] env[62109]: DEBUG nova.virt.hardware [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.875114] env[62109]: DEBUG nova.virt.hardware [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 911.875539] env[62109]: DEBUG nova.virt.hardware [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 911.875766] env[62109]: DEBUG nova.virt.hardware [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 911.876033] env[62109]: DEBUG nova.virt.hardware [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 911.876504] env[62109]: DEBUG nova.virt.hardware [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 911.876504] env[62109]: DEBUG nova.virt.hardware [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 911.876695] env[62109]: DEBUG nova.virt.hardware [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 911.876913] env[62109]: DEBUG nova.virt.hardware [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 911.878571] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81087af2-4e59-4ab0-80c9-e9e46610c3c5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.894449] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f15748-7775-41c0-86b1-9f421502df1c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.899164] env[62109]: INFO nova.scheduler.client.report [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Deleted allocations for instance 7afbb35b-9865-40a7-8b37-d6a661a186a9 [ 911.988781] env[62109]: DEBUG nova.network.neutron [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Port 70216814-67e6-4c4a-80a6-94f8cf8dd246 binding to destination host cpu-1 is already ACTIVE {{(pid=62109) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 912.061706] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.072578] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.410951] env[62109]: DEBUG nova.compute.manager [req-7f4d93c6-aaa5-4403-8375-4ea838aaf016 req-225bc4ba-a945-4ef9-a8a6-e0ab68d42e95 service nova] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Received event network-vif-plugged-a374ac72-8bf4-4d62-9108-eb77cdb6e2d6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 912.411309] env[62109]: DEBUG oslo_concurrency.lockutils [req-7f4d93c6-aaa5-4403-8375-4ea838aaf016 req-225bc4ba-a945-4ef9-a8a6-e0ab68d42e95 service nova] Acquiring lock "128ae6c9-1f82-4c67-83be-42cb554c2fd3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.411555] env[62109]: DEBUG oslo_concurrency.lockutils [req-7f4d93c6-aaa5-4403-8375-4ea838aaf016 req-225bc4ba-a945-4ef9-a8a6-e0ab68d42e95 service nova] Lock "128ae6c9-1f82-4c67-83be-42cb554c2fd3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.411841] env[62109]: DEBUG oslo_concurrency.lockutils [req-7f4d93c6-aaa5-4403-8375-4ea838aaf016 req-225bc4ba-a945-4ef9-a8a6-e0ab68d42e95 service nova] Lock "128ae6c9-1f82-4c67-83be-42cb554c2fd3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.412428] env[62109]: DEBUG nova.compute.manager [req-7f4d93c6-aaa5-4403-8375-4ea838aaf016 req-225bc4ba-a945-4ef9-a8a6-e0ab68d42e95 service nova] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] No waiting events found dispatching network-vif-plugged-a374ac72-8bf4-4d62-9108-eb77cdb6e2d6 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 912.412686] env[62109]: WARNING nova.compute.manager [req-7f4d93c6-aaa5-4403-8375-4ea838aaf016 req-225bc4ba-a945-4ef9-a8a6-e0ab68d42e95 service nova] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Received unexpected event network-vif-plugged-a374ac72-8bf4-4d62-9108-eb77cdb6e2d6 for instance with vm_state building and task_state spawning. 
[ 912.417070] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63e48633-8dc7-4a59-9a66-ae929298793c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.280s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.417949] env[62109]: DEBUG oslo_concurrency.lockutils [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] Acquired lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.420921] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d916d342-0e99-4e6a-8498-844e26d20d09 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.429389] env[62109]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 912.429494] env[62109]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=62109) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 912.429907] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-920daea6-a347-4c3e-8ead-6ac940c49bf0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.440369] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8c86cc-6b27-464e-8cfe-fc226c227496 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.473872] env[62109]: ERROR root [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-244431' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-244431' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-244431' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-244431'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-244431' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-244431' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-244431'}\n"]: nova.exception.InstanceNotFound: Instance 7afbb35b-9865-40a7-8b37-d6a661a186a9 could not be found. [ 912.474207] env[62109]: DEBUG oslo_concurrency.lockutils [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] Releasing lock "7afbb35b-9865-40a7-8b37-d6a661a186a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.474358] env[62109]: DEBUG nova.compute.manager [req-18720bf8-d19a-472e-b8da-e4878ec8a4db req-7ca18423-8ab2-46a4-9ee5-3dea914c4b29 service nova] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Detach interface failed, port_id=a54ea2c9-4872-4e92-893b-ad7c797f25ac, reason: Instance 7afbb35b-9865-40a7-8b37-d6a661a186a9 could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 912.552177] env[62109]: DEBUG nova.network.neutron [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Successfully updated port: a374ac72-8bf4-4d62-9108-eb77cdb6e2d6 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 912.871377] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98341bc3-d4cc-4d6e-a788-77947dd0373a tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.872949] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.511s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.874513] env[62109]: INFO nova.compute.claims [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 912.886302] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Acquiring lock "f91f4482-b18d-4883-9f6b-3bc5a386eedd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.886563] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Lock "f91f4482-b18d-4883-9f6b-3bc5a386eedd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.886771] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Acquiring lock "f91f4482-b18d-4883-9f6b-3bc5a386eedd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.886960] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Lock "f91f4482-b18d-4883-9f6b-3bc5a386eedd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.887192] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Lock "f91f4482-b18d-4883-9f6b-3bc5a386eedd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.889125] env[62109]: INFO nova.compute.manager [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Terminating instance [ 912.890931] env[62109]: DEBUG nova.compute.manager [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 912.891138] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 912.892261] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408f8312-1023-4b13-ba89-5e5ece43e534 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.900726] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 912.900984] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3437b781-9b28-4702-8167-2198a7df507f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.912286] env[62109]: DEBUG oslo_vmware.api [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Waiting for the task: (returnval){ [ 912.912286] env[62109]: value = "task-1116779" [ 912.912286] env[62109]: _type = "Task" [ 912.912286] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.925472] env[62109]: DEBUG oslo_vmware.api [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': task-1116779, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.012632] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "66bbe1e6-e5ee-46a0-b95c-449eef636509-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.012906] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "66bbe1e6-e5ee-46a0-b95c-449eef636509-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.013146] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "66bbe1e6-e5ee-46a0-b95c-449eef636509-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.057146] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "refresh_cache-128ae6c9-1f82-4c67-83be-42cb554c2fd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.057146] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "refresh_cache-128ae6c9-1f82-4c67-83be-42cb554c2fd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.057146] env[62109]: DEBUG nova.network.neutron [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 913.424226] env[62109]: DEBUG oslo_vmware.api [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': task-1116779, 'name': PowerOffVM_Task, 'duration_secs': 0.194141} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.425034] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 913.425274] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 913.425837] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c014a25a-6a88-44a4-a08f-47910a8467c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.499978] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 913.500242] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 913.500435] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Deleting the datastore file [datastore1] f91f4482-b18d-4883-9f6b-3bc5a386eedd {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.500793] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5849be82-7993-43d6-add2-96607898fd3b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.508256] env[62109]: DEBUG oslo_vmware.api [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Waiting for the task: (returnval){ [ 913.508256] env[62109]: value = "task-1116781" [ 913.508256] env[62109]: _type = "Task" [ 913.508256] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.519593] env[62109]: DEBUG oslo_vmware.api [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': task-1116781, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.636747] env[62109]: DEBUG nova.network.neutron [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 913.893104] env[62109]: DEBUG nova.network.neutron [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Updating instance_info_cache with network_info: [{"id": "a374ac72-8bf4-4d62-9108-eb77cdb6e2d6", "address": "fa:16:3e:69:49:7a", "network": {"id": "9b805542-3ae8-423b-9b1d-70116ea546bb", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1529116057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275238e3083540aa838de6d5cccf61eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa374ac72-8b", "ovs_interfaceid": "a374ac72-8bf4-4d62-9108-eb77cdb6e2d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.023730] env[62109]: DEBUG oslo_vmware.api [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Task: {'id': task-1116781, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170605} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.024022] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 914.024219] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 914.024404] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 914.024580] env[62109]: INFO nova.compute.manager [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Took 1.13 seconds to destroy the instance on the hypervisor. 
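The power-off / unregister / datastore-delete sequence above follows oslo.vmware's invoke-and-poll pattern: each *_Task method returns a task moref that is polled until completion (the "progress is N%" and "completed successfully" lines). A rough sketch of that pattern against the public oslo.vmware session API, not Nova's own wrapper; `session` is assumed to be an authenticated oslo_vmware.api.VMwareAPISession and `vm_ref` a placeholder managed-object reference, since the excerpt does not show how they were obtained:

def power_off_and_unregister(session, vm_ref):
    # PowerOffVM_Task returns a Task moref; wait_for_task polls it until it
    # reaches the "completed successfully" state seen in the log (or raises).
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
    # UnregisterVM is synchronous, so there is no task to poll afterwards.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)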
[ 914.024822] env[62109]: DEBUG oslo.service.loopingcall [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.025329] env[62109]: DEBUG nova.compute.manager [-] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 914.025427] env[62109]: DEBUG nova.network.neutron [-] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 914.219296] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a544c84-a648-4e85-9617-d69d12a290d4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.229047] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-015dcad3-da28-4b6f-b2d9-1bcb58d14fed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.261728] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe6273e-be0d-460b-9b5f-9681dbf28a69 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.271902] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef5cca8-0959-448e-abda-3568eaf2032a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.287269] env[62109]: DEBUG nova.compute.provider_tree [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.292393] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.292393] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.292393] env[62109]: DEBUG nova.network.neutron [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 914.394677] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "refresh_cache-128ae6c9-1f82-4c67-83be-42cb554c2fd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.395028] env[62109]: DEBUG nova.compute.manager [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Instance network_info: |[{"id": "a374ac72-8bf4-4d62-9108-eb77cdb6e2d6", "address": "fa:16:3e:69:49:7a", "network": {"id": "9b805542-3ae8-423b-9b1d-70116ea546bb", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1529116057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275238e3083540aa838de6d5cccf61eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa374ac72-8b", "ovs_interfaceid": "a374ac72-8bf4-4d62-9108-eb77cdb6e2d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 914.395574] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:49:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a374ac72-8bf4-4d62-9108-eb77cdb6e2d6', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 914.407423] env[62109]: DEBUG oslo.service.loopingcall [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.407746] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 914.408095] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ed51613-635f-4f38-8ad2-94b73301bba6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.436404] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 914.436404] env[62109]: value = "task-1116782" [ 914.436404] env[62109]: _type = "Task" [ 914.436404] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.442613] env[62109]: DEBUG nova.compute.manager [req-6f58ee30-5075-4f63-9cbf-eb44917959e4 req-68ba127b-2dcb-4051-b890-0becafa0fe76 service nova] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Received event network-changed-a374ac72-8bf4-4d62-9108-eb77cdb6e2d6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 914.442613] env[62109]: DEBUG nova.compute.manager [req-6f58ee30-5075-4f63-9cbf-eb44917959e4 req-68ba127b-2dcb-4051-b890-0becafa0fe76 service nova] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Refreshing instance network info cache due to event network-changed-a374ac72-8bf4-4d62-9108-eb77cdb6e2d6. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 914.442771] env[62109]: DEBUG oslo_concurrency.lockutils [req-6f58ee30-5075-4f63-9cbf-eb44917959e4 req-68ba127b-2dcb-4051-b890-0becafa0fe76 service nova] Acquiring lock "refresh_cache-128ae6c9-1f82-4c67-83be-42cb554c2fd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.442893] env[62109]: DEBUG oslo_concurrency.lockutils [req-6f58ee30-5075-4f63-9cbf-eb44917959e4 req-68ba127b-2dcb-4051-b890-0becafa0fe76 service nova] Acquired lock "refresh_cache-128ae6c9-1f82-4c67-83be-42cb554c2fd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.443064] env[62109]: DEBUG nova.network.neutron [req-6f58ee30-5075-4f63-9cbf-eb44917959e4 req-68ba127b-2dcb-4051-b890-0becafa0fe76 service nova] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Refreshing network info cache for port a374ac72-8bf4-4d62-9108-eb77cdb6e2d6 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 914.451906] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116782, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.499252] env[62109]: DEBUG nova.compute.manager [req-c1c35ab4-1e64-4a23-aa1a-b044997fc82f req-3d4268b7-1e0b-421f-9a05-9587b3c2768b service nova] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Received event network-vif-deleted-aa073427-0e13-40b5-a706-b9b7dfb1818c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 914.499486] env[62109]: INFO nova.compute.manager [req-c1c35ab4-1e64-4a23-aa1a-b044997fc82f req-3d4268b7-1e0b-421f-9a05-9587b3c2768b service nova] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Neutron deleted interface aa073427-0e13-40b5-a706-b9b7dfb1818c; detaching it from the instance and deleting it from the info cache [ 914.499681] env[62109]: DEBUG nova.network.neutron [req-c1c35ab4-1e64-4a23-aa1a-b044997fc82f req-3d4268b7-1e0b-421f-9a05-9587b3c2768b service nova] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.792276] env[62109]: DEBUG nova.scheduler.client.report [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 914.937888] env[62109]: DEBUG nova.network.neutron [-] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.958020] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116782, 'name': CreateVM_Task, 'duration_secs': 0.312973} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.958020] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 914.958020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ee68a607-5d22-48e9-a6df-4918df414238" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.958020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ee68a607-5d22-48e9-a6df-4918df414238" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.958020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ee68a607-5d22-48e9-a6df-4918df414238" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 914.958020] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f6d3735-9568-4743-9700-1cc4d6bd5c77 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.965944] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 914.965944] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528c44e1-6fe2-497e-d52a-862bc59ce9bd" [ 914.965944] env[62109]: _type = "Task" [ 914.965944] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.977516] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528c44e1-6fe2-497e-d52a-862bc59ce9bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.002269] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5a2d2744-8321-4532-a511-8b6895c564c9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.014679] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ff9ff2-804e-4093-bef2-a9825b47ba65 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.051175] env[62109]: DEBUG nova.compute.manager [req-c1c35ab4-1e64-4a23-aa1a-b044997fc82f req-3d4268b7-1e0b-421f-9a05-9587b3c2768b service nova] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Detach interface failed, port_id=aa073427-0e13-40b5-a706-b9b7dfb1818c, reason: Instance f91f4482-b18d-4883-9f6b-3bc5a386eedd could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 915.187509] env[62109]: DEBUG nova.network.neutron [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating instance_info_cache with network_info: [{"id": "70216814-67e6-4c4a-80a6-94f8cf8dd246", "address": "fa:16:3e:fc:3a:44", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70216814-67", "ovs_interfaceid": "70216814-67e6-4c4a-80a6-94f8cf8dd246", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.260784] env[62109]: DEBUG nova.network.neutron [req-6f58ee30-5075-4f63-9cbf-eb44917959e4 req-68ba127b-2dcb-4051-b890-0becafa0fe76 service nova] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Updated VIF entry in instance network info cache for port a374ac72-8bf4-4d62-9108-eb77cdb6e2d6. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 915.261188] env[62109]: DEBUG nova.network.neutron [req-6f58ee30-5075-4f63-9cbf-eb44917959e4 req-68ba127b-2dcb-4051-b890-0becafa0fe76 service nova] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Updating instance_info_cache with network_info: [{"id": "a374ac72-8bf4-4d62-9108-eb77cdb6e2d6", "address": "fa:16:3e:69:49:7a", "network": {"id": "9b805542-3ae8-423b-9b1d-70116ea546bb", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1529116057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275238e3083540aa838de6d5cccf61eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa374ac72-8b", "ovs_interfaceid": "a374ac72-8bf4-4d62-9108-eb77cdb6e2d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.298596] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.299127] env[62109]: DEBUG nova.compute.manager [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 915.302042] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.261s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.303389] env[62109]: INFO nova.compute.claims [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 915.441021] env[62109]: INFO nova.compute.manager [-] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Took 1.42 seconds to deallocate network for instance. 
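The "Acquiring lock / Acquired lock / Releasing lock" records above and below come from oslo.concurrency's lockutils: each image-cache path (for example "[datastore1] devstack-image-cache_base/ee68a607-...") is used as a named lock so that only one request at a time fetches, caches, or moves a given image. The following is a minimal sketch of that idiom under assumed placeholder names (CACHE_LOCK, fetch_fn, move_cached_disk); it illustrates the locking pattern only and is not the Nova code path itself.

    # Minimal illustration of the named-lock pattern visible in the records
    # above; the lock name and the worker functions are placeholders.
    from oslo_concurrency import lockutils

    CACHE_LOCK = "[datastore1] devstack-image-cache_base/<image-id>"

    def fetch_image_if_missing(fetch_fn):
        # lockutils.lock() returns a context manager; the body runs with the
        # named lock held, matching the "Acquiring ... / Releasing ..." pairs
        # seen in the log.
        with lockutils.lock(CACHE_LOCK):
            return fetch_fn()

    # The same serialization can be expressed with the decorator form; any
    # caller using the same lock name is mutually excluded with the above.
    @lockutils.synchronized(CACHE_LOCK)
    def move_cached_disk():
        pass

Because the lock name embeds the datastore and image id, work on different images proceeds in parallel while operations on the same cached image are serialized, which matches the interleaving visible in these records.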
[ 915.478356] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ee68a607-5d22-48e9-a6df-4918df414238" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.478683] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Processing image ee68a607-5d22-48e9-a6df-4918df414238 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 915.479276] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ee68a607-5d22-48e9-a6df-4918df414238/ee68a607-5d22-48e9-a6df-4918df414238.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.479276] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ee68a607-5d22-48e9-a6df-4918df414238/ee68a607-5d22-48e9-a6df-4918df414238.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.479276] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 915.479564] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1d0bf57-c9d2-4d02-bffd-ceb915afe4c1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.489547] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 915.490615] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 915.490615] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68d08366-581a-4df6-825f-6003f7cede23 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.498786] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 915.498786] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524dce74-60e7-a0ab-0063-301ac0610634" [ 915.498786] env[62109]: _type = "Task" [ 915.498786] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.509027] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524dce74-60e7-a0ab-0063-301ac0610634, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.567933] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "6b5a009e-28f5-4be7-8641-089abe359954" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.568158] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "6b5a009e-28f5-4be7-8641-089abe359954" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.689938] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.763686] env[62109]: DEBUG oslo_concurrency.lockutils [req-6f58ee30-5075-4f63-9cbf-eb44917959e4 req-68ba127b-2dcb-4051-b890-0becafa0fe76 service nova] Releasing lock "refresh_cache-128ae6c9-1f82-4c67-83be-42cb554c2fd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.808456] env[62109]: DEBUG nova.compute.utils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 915.811736] env[62109]: DEBUG nova.compute.manager [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] 
Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 915.811905] env[62109]: DEBUG nova.network.neutron [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 915.857546] env[62109]: DEBUG nova.policy [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4cac0fc21b44e719f3fbb91cbfeeb20', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd79b6e383494f2bb88bd4a0e388f18d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 915.948379] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.009296] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Preparing fetch location {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 916.009586] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Fetch image to [datastore1] OSTACK_IMG_fbc63d5c-4c98-4ae2-9384-f8275226aafa/OSTACK_IMG_fbc63d5c-4c98-4ae2-9384-f8275226aafa.vmdk {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 916.009801] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Downloading stream optimized image ee68a607-5d22-48e9-a6df-4918df414238 to [datastore1] OSTACK_IMG_fbc63d5c-4c98-4ae2-9384-f8275226aafa/OSTACK_IMG_fbc63d5c-4c98-4ae2-9384-f8275226aafa.vmdk on the data store datastore1 as vApp {{(pid=62109) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 916.009981] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Downloading image file data ee68a607-5d22-48e9-a6df-4918df414238 to the ESX as VM named 'OSTACK_IMG_fbc63d5c-4c98-4ae2-9384-f8275226aafa' {{(pid=62109) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 916.072647] env[62109]: DEBUG nova.compute.manager [None 
req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 916.092748] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 916.092748] env[62109]: value = "resgroup-9" [ 916.092748] env[62109]: _type = "ResourcePool" [ 916.092748] env[62109]: }. {{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 916.092748] env[62109]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b5051cbb-33a9-4b84-b357-cfd36bb0b9c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.116032] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lease: (returnval){ [ 916.116032] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5211e20c-e305-6e78-02c6-f2bbcca9c91b" [ 916.116032] env[62109]: _type = "HttpNfcLease" [ 916.116032] env[62109]: } obtained for vApp import into resource pool (val){ [ 916.116032] env[62109]: value = "resgroup-9" [ 916.116032] env[62109]: _type = "ResourcePool" [ 916.116032] env[62109]: }. {{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 916.116537] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the lease: (returnval){ [ 916.116537] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5211e20c-e305-6e78-02c6-f2bbcca9c91b" [ 916.116537] env[62109]: _type = "HttpNfcLease" [ 916.116537] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 916.124229] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 916.124229] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5211e20c-e305-6e78-02c6-f2bbcca9c91b" [ 916.124229] env[62109]: _type = "HttpNfcLease" [ 916.124229] env[62109]: } is initializing. 
{{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 916.224627] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f23e6e9-fd34-4ef4-90fd-8da5e754e971 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.247166] env[62109]: DEBUG nova.network.neutron [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Successfully created port: 63549817-3bd1-441c-af9c-739682b35cf2 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 916.250007] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4319eeb9-1330-4608-9369-68483a72b113 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.259548] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating instance '66bbe1e6-e5ee-46a0-b95c-449eef636509' progress to 83 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 916.312307] env[62109]: DEBUG nova.compute.manager [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 916.593993] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.629474] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 916.629474] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5211e20c-e305-6e78-02c6-f2bbcca9c91b" [ 916.629474] env[62109]: _type = "HttpNfcLease" [ 916.629474] env[62109]: } is initializing. 
{{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 916.709420] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37f5984-694d-4d8f-b18f-d5756e11f3b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.719315] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd8e97e-ef2a-4722-9fdb-24d1f0ad149f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.755105] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cac4524-1820-4c5f-bc2d-425dfa66f914 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.764516] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b45fa5c-8d21-4e6e-810f-c6bc650466b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.770711] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 916.770711] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e46d7a99-5354-44e4-a4c5-d14b6257f647 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.788015] env[62109]: DEBUG nova.compute.provider_tree [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.788015] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 916.788015] env[62109]: value = "task-1116784" [ 916.788015] env[62109]: _type = "Task" [ 916.788015] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.798022] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116784, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.127224] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 917.127224] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5211e20c-e305-6e78-02c6-f2bbcca9c91b" [ 917.127224] env[62109]: _type = "HttpNfcLease" [ 917.127224] env[62109]: } is ready. 
{{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 917.127710] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 917.127710] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5211e20c-e305-6e78-02c6-f2bbcca9c91b" [ 917.127710] env[62109]: _type = "HttpNfcLease" [ 917.127710] env[62109]: }. {{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 917.128360] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3376a5-ad6d-4c16-a1cc-b60898ae6105 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.138639] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52395b1d-a9b5-81c8-efa3-ae743095707c/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 917.138874] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52395b1d-a9b5-81c8-efa3-ae743095707c/disk-0.vmdk. {{(pid=62109) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 917.207901] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e5199712-7592-45d9-8119-54eb7b8637c5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.289748] env[62109]: DEBUG nova.scheduler.client.report [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 917.306184] env[62109]: DEBUG oslo_vmware.api [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116784, 'name': PowerOnVM_Task, 'duration_secs': 0.435062} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.306573] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 917.306766] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7db3b6f1-02d8-4522-8ec3-799d08aee3b6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating instance '66bbe1e6-e5ee-46a0-b95c-449eef636509' progress to 100 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 917.326423] env[62109]: DEBUG nova.compute.manager [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 917.357071] env[62109]: DEBUG nova.virt.hardware [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 917.357347] env[62109]: DEBUG nova.virt.hardware [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 917.357511] env[62109]: DEBUG nova.virt.hardware [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 917.357700] env[62109]: DEBUG nova.virt.hardware [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 917.357851] env[62109]: DEBUG nova.virt.hardware [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Image pref 0:0:0 
{{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 917.358011] env[62109]: DEBUG nova.virt.hardware [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 917.358225] env[62109]: DEBUG nova.virt.hardware [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 917.358436] env[62109]: DEBUG nova.virt.hardware [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 917.358589] env[62109]: DEBUG nova.virt.hardware [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 917.358763] env[62109]: DEBUG nova.virt.hardware [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 917.358941] env[62109]: DEBUG nova.virt.hardware [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 917.360074] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba89a48-1b92-4818-b272-a1f21e519824 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.368584] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bc3ebd-bd9b-407b-8a18-d0c315f2c689 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.795815] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.796451] env[62109]: DEBUG nova.compute.manager [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Start building networks asynchronously for 
instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 917.800015] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.380s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.800266] env[62109]: DEBUG nova.objects.instance [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'resources' on Instance uuid 1399f618-3a93-4731-a59b-f98306d6cd52 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.908803] env[62109]: DEBUG nova.compute.manager [req-0a35fd95-c740-4bb0-a9c4-12fe36db80af req-9772edc9-75b9-42a2-9831-d32d06e78246 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Received event network-vif-plugged-63549817-3bd1-441c-af9c-739682b35cf2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 917.909134] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a35fd95-c740-4bb0-a9c4-12fe36db80af req-9772edc9-75b9-42a2-9831-d32d06e78246 service nova] Acquiring lock "39c17e34-c8c0-4a66-8d22-717efcb984bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.909452] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a35fd95-c740-4bb0-a9c4-12fe36db80af req-9772edc9-75b9-42a2-9831-d32d06e78246 service nova] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.909721] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a35fd95-c740-4bb0-a9c4-12fe36db80af req-9772edc9-75b9-42a2-9831-d32d06e78246 service nova] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.910087] env[62109]: DEBUG nova.compute.manager [req-0a35fd95-c740-4bb0-a9c4-12fe36db80af req-9772edc9-75b9-42a2-9831-d32d06e78246 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] No waiting events found dispatching network-vif-plugged-63549817-3bd1-441c-af9c-739682b35cf2 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 917.910292] env[62109]: WARNING nova.compute.manager [req-0a35fd95-c740-4bb0-a9c4-12fe36db80af req-9772edc9-75b9-42a2-9831-d32d06e78246 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Received unexpected event network-vif-plugged-63549817-3bd1-441c-af9c-739682b35cf2 for instance with vm_state building and task_state spawning. 
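The records from the ImportVApp call through HttpNfcLeaseComplete (above and in the entries that follow) trace the standard oslo.vmware HttpNfcLease lifecycle for a stream-optimized image import: obtain a lease from ResourcePool.ImportVApp, poll it until it is ready, read the VMDK upload URL from the lease info, stream the image bytes while reporting progress, then release the lease. The sketch below is a simplified, hypothetical illustration of that flow, not the driver's actual code; `session` is assumed to be an established oslo_vmware VMwareAPISession, and `rp_ref`, `vm_folder_ref`, `import_spec`, and `write_data` are assumed to be prepared by the caller. Error handling (aborting the lease on failure) is omitted.

    # Hypothetical sketch of the HttpNfcLease lifecycle shown in the log.
    from oslo_vmware import vim_util

    def import_vapp_disk(session, rp_ref, vm_folder_ref, import_spec, write_data):
        # ResourcePool.ImportVApp returns an HttpNfcLease ("Creating
        # HttpNfcLease lease for vApp import" in the records above).
        lease = session.invoke_api(session.vim, 'ImportVApp', rp_ref,
                                   spec=import_spec, folder=vm_folder_ref)

        # Poll until the lease leaves the "initializing" state
        # (wait_for_lease_ready in oslo_vmware/api.py).
        session.wait_for_lease_ready(lease)

        # Read the lease info to locate the VMDK upload URL
        # ("Found VMDK URL ... from lease info").
        lease_info = session.invoke_api(vim_util, 'get_object_property',
                                        session.vim, lease, 'info')
        vmdk_url = lease_info.deviceUrl[0].url

        # Stream the image bytes to the host, then report progress so the
        # lease does not time out (HttpNfcLeaseProgress).
        write_data(vmdk_url)
        session.invoke_api(session.vim, 'HttpNfcLeaseProgress', lease, percent=100)

        # Release the lease once the transfer is complete (HttpNfcLeaseComplete).
        session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)

In the driver itself the upload and the periodic progress updates are handled by oslo.vmware's rw_handles (the "Creating HTTP connection to write to file" record above and the "Closed VMDK write handle" record that follows), but the lease state machine is the same.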
[ 918.152704] env[62109]: DEBUG nova.network.neutron [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Successfully updated port: 63549817-3bd1-441c-af9c-739682b35cf2 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 918.312885] env[62109]: DEBUG nova.compute.utils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 918.316053] env[62109]: DEBUG nova.compute.manager [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 918.316178] env[62109]: DEBUG nova.network.neutron [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 918.399862] env[62109]: DEBUG nova.policy [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b39ff10ac8bd4e4abf04fd881e5125ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9e5867b8b7e4ed18c5395baf46db66f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 918.452372] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Completed reading data from the image iterator. {{(pid=62109) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 918.452650] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52395b1d-a9b5-81c8-efa3-ae743095707c/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 918.453758] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c7941e-349d-4c7d-ba26-593ac1fbbb00 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.465671] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52395b1d-a9b5-81c8-efa3-ae743095707c/disk-0.vmdk is in state: ready. 
{{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 918.465865] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52395b1d-a9b5-81c8-efa3-ae743095707c/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 918.466271] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-ddf6e1d8-308b-44e0-bd05-752bbf96de9b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.642477] env[62109]: DEBUG oslo_vmware.rw_handles [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52395b1d-a9b5-81c8-efa3-ae743095707c/disk-0.vmdk. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 918.642761] env[62109]: INFO nova.virt.vmwareapi.images [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Downloaded image file data ee68a607-5d22-48e9-a6df-4918df414238 [ 918.643642] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68475eb1-d9ab-4f3b-b868-dbd87169ae89 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.665027] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.665027] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.665180] env[62109]: DEBUG nova.network.neutron [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 918.670039] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f7c2238-0239-44b3-87a0-46a7dde5c03e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.701816] env[62109]: INFO nova.virt.vmwareapi.images [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] The imported VM was unregistered [ 918.704842] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None 
req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Caching image {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 918.705116] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Creating directory with path [datastore1] devstack-image-cache_base/ee68a607-5d22-48e9-a6df-4918df414238 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 918.705397] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8705169-9908-4f64-9504-18c0c7d72e0e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.732584] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Created directory with path [datastore1] devstack-image-cache_base/ee68a607-5d22-48e9-a6df-4918df414238 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 918.732796] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_fbc63d5c-4c98-4ae2-9384-f8275226aafa/OSTACK_IMG_fbc63d5c-4c98-4ae2-9384-f8275226aafa.vmdk to [datastore1] devstack-image-cache_base/ee68a607-5d22-48e9-a6df-4918df414238/ee68a607-5d22-48e9-a6df-4918df414238.vmdk. {{(pid=62109) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 918.733074] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-faecdc44-b54c-491c-9eb3-6dea478b1edb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.736933] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e90d76-1560-405d-a8ad-a8b910b5ba64 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.745092] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec155d3-ac58-415d-9025-9a3f8804729d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.748506] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 918.748506] env[62109]: value = "task-1116786" [ 918.748506] env[62109]: _type = "Task" [ 918.748506] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.777751] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc2f815-a096-4771-bc1e-384565f0be97 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.783458] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116786, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.788819] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c8e7a9-8f9e-46ab-bb68-e21f55647d2e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.803289] env[62109]: DEBUG nova.compute.provider_tree [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.819793] env[62109]: DEBUG nova.compute.manager [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 918.854470] env[62109]: DEBUG nova.network.neutron [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Successfully created port: 14e9f4a1-8980-4de2-88f7-dd0162687351 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 919.217443] env[62109]: DEBUG nova.network.neutron [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 919.266353] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116786, 'name': MoveVirtualDisk_Task} progress is 24%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.308755] env[62109]: DEBUG nova.scheduler.client.report [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 919.387643] env[62109]: DEBUG nova.network.neutron [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updating instance_info_cache with network_info: [{"id": "63549817-3bd1-441c-af9c-739682b35cf2", "address": "fa:16:3e:39:dd:c1", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63549817-3b", "ovs_interfaceid": "63549817-3bd1-441c-af9c-739682b35cf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.764224] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116786, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.815604] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.015s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.818398] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 29.686s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.830549] env[62109]: DEBUG nova.compute.manager [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 919.841362] env[62109]: INFO nova.scheduler.client.report [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Deleted allocations for instance 1399f618-3a93-4731-a59b-f98306d6cd52 [ 919.869021] env[62109]: DEBUG nova.virt.hardware [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 919.869021] env[62109]: DEBUG nova.virt.hardware [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 919.869021] env[62109]: DEBUG nova.virt.hardware [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 919.869021] env[62109]: DEBUG nova.virt.hardware [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 919.869021] env[62109]: DEBUG nova.virt.hardware [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 919.869021] env[62109]: DEBUG nova.virt.hardware [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 919.869021] env[62109]: DEBUG nova.virt.hardware [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 919.869021] env[62109]: DEBUG nova.virt.hardware [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 919.869021] env[62109]: DEBUG nova.virt.hardware [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 919.869021] env[62109]: DEBUG nova.virt.hardware [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 919.869021] env[62109]: DEBUG nova.virt.hardware [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 919.869587] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03746009-77a2-44ed-9dcf-138df45ea209 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.878703] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120d4051-3c12-409c-bfd4-1af9431fb838 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.895066] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.895395] env[62109]: DEBUG nova.compute.manager [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 
tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Instance network_info: |[{"id": "63549817-3bd1-441c-af9c-739682b35cf2", "address": "fa:16:3e:39:dd:c1", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63549817-3b", "ovs_interfaceid": "63549817-3bd1-441c-af9c-739682b35cf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 919.897302] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:dd:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0d7a2b2f-3b49-4dc8-9096-af16144b27a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63549817-3bd1-441c-af9c-739682b35cf2', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 919.905183] env[62109]: DEBUG oslo.service.loopingcall [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 919.906333] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 919.906333] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6aebf430-afcc-4f53-b41b-513d29747c3e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.928846] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 919.928846] env[62109]: value = "task-1116787" [ 919.928846] env[62109]: _type = "Task" [ 919.928846] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.938732] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116787, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.995252] env[62109]: DEBUG nova.compute.manager [req-56502d64-3556-459b-850d-3fbbf58f5bda req-405aa167-8e23-42c8-9bb9-eb3f8c1c50a3 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Received event network-changed-63549817-3bd1-441c-af9c-739682b35cf2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.995481] env[62109]: DEBUG nova.compute.manager [req-56502d64-3556-459b-850d-3fbbf58f5bda req-405aa167-8e23-42c8-9bb9-eb3f8c1c50a3 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Refreshing instance network info cache due to event network-changed-63549817-3bd1-441c-af9c-739682b35cf2. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 919.995762] env[62109]: DEBUG oslo_concurrency.lockutils [req-56502d64-3556-459b-850d-3fbbf58f5bda req-405aa167-8e23-42c8-9bb9-eb3f8c1c50a3 service nova] Acquiring lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.995914] env[62109]: DEBUG oslo_concurrency.lockutils [req-56502d64-3556-459b-850d-3fbbf58f5bda req-405aa167-8e23-42c8-9bb9-eb3f8c1c50a3 service nova] Acquired lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.996095] env[62109]: DEBUG nova.network.neutron [req-56502d64-3556-459b-850d-3fbbf58f5bda req-405aa167-8e23-42c8-9bb9-eb3f8c1c50a3 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Refreshing network info cache for port 63549817-3bd1-441c-af9c-739682b35cf2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 920.044215] env[62109]: DEBUG oslo_concurrency.lockutils [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "66bbe1e6-e5ee-46a0-b95c-449eef636509" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.044514] env[62109]: DEBUG oslo_concurrency.lockutils [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "66bbe1e6-e5ee-46a0-b95c-449eef636509" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.044717] env[62109]: DEBUG nova.compute.manager [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Going to confirm migration 2 {{(pid=62109) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 920.269024] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116786, 'name': MoveVirtualDisk_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.324265] env[62109]: INFO nova.compute.claims [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 920.349368] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3beedaf-5b7a-49e8-ba6e-4efac2a59834 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "1399f618-3a93-4731-a59b-f98306d6cd52" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.721s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.441479] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116787, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.681056] env[62109]: DEBUG nova.network.neutron [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Successfully updated port: 14e9f4a1-8980-4de2-88f7-dd0162687351 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 920.714986] env[62109]: DEBUG oslo_concurrency.lockutils [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 920.714986] env[62109]: DEBUG oslo_concurrency.lockutils [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.715086] env[62109]: DEBUG nova.network.neutron [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 920.715305] env[62109]: DEBUG nova.objects.instance [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lazy-loading 'info_cache' on Instance uuid 66bbe1e6-e5ee-46a0-b95c-449eef636509 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 920.764280] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116786, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.779158] env[62109]: DEBUG nova.network.neutron [req-56502d64-3556-459b-850d-3fbbf58f5bda req-405aa167-8e23-42c8-9bb9-eb3f8c1c50a3 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updated VIF entry in instance network info cache for port 63549817-3bd1-441c-af9c-739682b35cf2. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 920.779609] env[62109]: DEBUG nova.network.neutron [req-56502d64-3556-459b-850d-3fbbf58f5bda req-405aa167-8e23-42c8-9bb9-eb3f8c1c50a3 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updating instance_info_cache with network_info: [{"id": "63549817-3bd1-441c-af9c-739682b35cf2", "address": "fa:16:3e:39:dd:c1", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63549817-3b", "ovs_interfaceid": "63549817-3bd1-441c-af9c-739682b35cf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.831134] env[62109]: INFO nova.compute.resource_tracker [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating resource usage from migration a1545213-e534-441e-8d4e-0d5c5aa97374 [ 920.946277] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116787, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.182885] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.183088] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.184029] env[62109]: DEBUG nova.network.neutron [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 921.201822] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2676bd-e413-461a-8a4c-46bf476adb29 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.210392] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174a7903-2d3b-4db3-82e3-eebee6e6ff23 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.246426] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b9e7d8-dad0-4d7f-99d9-4f5fc825c981 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.265271] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23020309-0f55-4e57-ab18-23f4de748bcb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.282166] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116786, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.366498} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.282705] env[62109]: DEBUG nova.compute.provider_tree [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.284212] env[62109]: DEBUG oslo_concurrency.lockutils [req-56502d64-3556-459b-850d-3fbbf58f5bda req-405aa167-8e23-42c8-9bb9-eb3f8c1c50a3 service nova] Releasing lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.284583] env[62109]: INFO nova.virt.vmwareapi.ds_util [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_fbc63d5c-4c98-4ae2-9384-f8275226aafa/OSTACK_IMG_fbc63d5c-4c98-4ae2-9384-f8275226aafa.vmdk to [datastore1] devstack-image-cache_base/ee68a607-5d22-48e9-a6df-4918df414238/ee68a607-5d22-48e9-a6df-4918df414238.vmdk. [ 921.284784] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Cleaning up location [datastore1] OSTACK_IMG_fbc63d5c-4c98-4ae2-9384-f8275226aafa {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 921.284952] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_fbc63d5c-4c98-4ae2-9384-f8275226aafa {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 921.285414] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9cfb511f-3317-4211-af54-ad009d7d93f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.293266] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 921.293266] env[62109]: value = "task-1116788" [ 921.293266] env[62109]: _type = "Task" [ 921.293266] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.303585] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116788, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.442105] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116787, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.733738] env[62109]: DEBUG nova.network.neutron [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 921.786651] env[62109]: DEBUG nova.scheduler.client.report [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 921.804086] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116788, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.04941} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.807482] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 921.807785] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ee68a607-5d22-48e9-a6df-4918df414238/ee68a607-5d22-48e9-a6df-4918df414238.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.808159] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ee68a607-5d22-48e9-a6df-4918df414238/ee68a607-5d22-48e9-a6df-4918df414238.vmdk to [datastore1] 128ae6c9-1f82-4c67-83be-42cb554c2fd3/128ae6c9-1f82-4c67-83be-42cb554c2fd3.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 921.811076] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e00e3ed-c27a-4d18-bc65-f8df31b7387f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.819074] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 921.819074] env[62109]: value = "task-1116789" [ 921.819074] env[62109]: _type = "Task" [ 921.819074] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.827883] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116789, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.944619] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116787, 'name': CreateVM_Task, 'duration_secs': 1.577738} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.945088] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 921.945875] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.946202] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.946814] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 921.947204] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7b056a4-64a8-4bf3-9877-dad51a9c5b8a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.952185] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 921.952185] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5290e538-1a44-a28d-bff1-77ee6c045fbd" [ 921.952185] env[62109]: _type = "Task" [ 921.952185] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.961605] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5290e538-1a44-a28d-bff1-77ee6c045fbd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.076168] env[62109]: DEBUG nova.network.neutron [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance_info_cache with network_info: [{"id": "14e9f4a1-8980-4de2-88f7-dd0162687351", "address": "fa:16:3e:3f:3e:8b", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14e9f4a1-89", "ovs_interfaceid": "14e9f4a1-8980-4de2-88f7-dd0162687351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.108590] env[62109]: DEBUG nova.compute.manager [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Received event network-vif-plugged-14e9f4a1-8980-4de2-88f7-dd0162687351 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 922.108859] env[62109]: DEBUG oslo_concurrency.lockutils [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] Acquiring lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.109103] env[62109]: DEBUG oslo_concurrency.lockutils [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.109283] env[62109]: DEBUG oslo_concurrency.lockutils [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.109460] env[62109]: DEBUG nova.compute.manager [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] No waiting events found dispatching network-vif-plugged-14e9f4a1-8980-4de2-88f7-dd0162687351 {{(pid=62109) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 922.109641] env[62109]: WARNING nova.compute.manager [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Received unexpected event network-vif-plugged-14e9f4a1-8980-4de2-88f7-dd0162687351 for instance with vm_state building and task_state spawning. [ 922.109938] env[62109]: DEBUG nova.compute.manager [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Received event network-changed-14e9f4a1-8980-4de2-88f7-dd0162687351 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 922.110141] env[62109]: DEBUG nova.compute.manager [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Refreshing instance network info cache due to event network-changed-14e9f4a1-8980-4de2-88f7-dd0162687351. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 922.110373] env[62109]: DEBUG oslo_concurrency.lockutils [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] Acquiring lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.176917] env[62109]: DEBUG nova.network.neutron [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating instance_info_cache with network_info: [{"id": "70216814-67e6-4c4a-80a6-94f8cf8dd246", "address": "fa:16:3e:fc:3a:44", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70216814-67", "ovs_interfaceid": "70216814-67e6-4c4a-80a6-94f8cf8dd246", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.292885] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.474s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.293196] env[62109]: INFO nova.compute.manager [None 
req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Migrating [ 922.300022] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.635s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.300265] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.302538] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.670s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.302852] env[62109]: DEBUG nova.objects.instance [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lazy-loading 'resources' on Instance uuid ac068268-1243-466e-8cd5-1ee2bc248ecd {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.332434] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116789, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.336280] env[62109]: INFO nova.scheduler.client.report [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Deleted allocations for instance 32cccd30-278c-48b6-8855-5cd76c2da057 [ 922.466840] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5290e538-1a44-a28d-bff1-77ee6c045fbd, 'name': SearchDatastore_Task, 'duration_secs': 0.009658} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.467231] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.467521] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 922.467845] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.468082] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.468333] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 922.468725] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df7d2a60-2db1-426a-9230-c6f946153f85 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.486143] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 922.486382] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 922.487355] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-706c06df-aa67-45e1-8503-e78c7d9903b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.494360] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 922.494360] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d99f2c-2336-c60c-4e70-84b86ca232bf" [ 922.494360] env[62109]: _type = "Task" [ 922.494360] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.503307] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d99f2c-2336-c60c-4e70-84b86ca232bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.577667] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.578101] env[62109]: DEBUG nova.compute.manager [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Instance network_info: |[{"id": "14e9f4a1-8980-4de2-88f7-dd0162687351", "address": "fa:16:3e:3f:3e:8b", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14e9f4a1-89", "ovs_interfaceid": "14e9f4a1-8980-4de2-88f7-dd0162687351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 922.578464] env[62109]: DEBUG oslo_concurrency.lockutils [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] Acquired lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.578726] env[62109]: DEBUG nova.network.neutron [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Refreshing network info cache for port 14e9f4a1-8980-4de2-88f7-dd0162687351 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 922.580189] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:3e:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61b8f0db-488e-42d7-bf6c-6c1665cd5616', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14e9f4a1-8980-4de2-88f7-dd0162687351', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 922.588196] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Creating folder: Project (c9e5867b8b7e4ed18c5395baf46db66f). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 922.589329] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2259e386-3757-42f9-8446-d042fe457070 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.603954] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Created folder: Project (c9e5867b8b7e4ed18c5395baf46db66f) in parent group-v244329. [ 922.604579] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Creating folder: Instances. Parent ref: group-v244488. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 922.604777] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b439f29f-ea23-470a-ba5a-b0cb8077b77b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.618115] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Created folder: Instances in parent group-v244488. [ 922.618419] env[62109]: DEBUG oslo.service.loopingcall [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 922.618747] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 922.619080] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c7cecf9-008a-4b9b-8e90-26b9a8659b60 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.642697] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 922.642697] env[62109]: value = "task-1116792" [ 922.642697] env[62109]: _type = "Task" [ 922.642697] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.653708] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116792, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.680271] env[62109]: DEBUG oslo_concurrency.lockutils [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "refresh_cache-66bbe1e6-e5ee-46a0-b95c-449eef636509" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.680585] env[62109]: DEBUG nova.objects.instance [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lazy-loading 'migration_context' on Instance uuid 66bbe1e6-e5ee-46a0-b95c-449eef636509 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.814442] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.814658] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.814779] env[62109]: DEBUG nova.network.neutron [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 922.834063] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116789, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.846166] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9b54eb05-a26d-4318-bd99-d0472178bdd9 tempest-MigrationsAdminTest-769684774 tempest-MigrationsAdminTest-769684774-project-member] Lock "32cccd30-278c-48b6-8855-5cd76c2da057" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.395s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.007871] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d99f2c-2336-c60c-4e70-84b86ca232bf, 'name': SearchDatastore_Task, 'duration_secs': 0.077175} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.008887] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-028e02f2-5870-46dc-a737-a23658f7502e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.018880] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 923.018880] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524c4404-8bfe-dbbc-aa44-923c6b4b6ea6" [ 923.018880] env[62109]: _type = "Task" [ 923.018880] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.032198] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524c4404-8bfe-dbbc-aa44-923c6b4b6ea6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.161286] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116792, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.184169] env[62109]: DEBUG nova.objects.base [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Object Instance<66bbe1e6-e5ee-46a0-b95c-449eef636509> lazy-loaded attributes: info_cache,migration_context {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 923.187924] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f4ead5-72f7-4275-bee3-4afff6a4b667 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.215028] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eb9321e-51ff-4cc1-9781-e385e87e1425 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.224078] env[62109]: DEBUG oslo_vmware.api [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 923.224078] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5283ccba-d2b3-168a-03b7-3fc1f89f88e1" [ 923.224078] env[62109]: _type = "Task" [ 923.224078] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.237261] env[62109]: DEBUG oslo_vmware.api [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5283ccba-d2b3-168a-03b7-3fc1f89f88e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.337462] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116789, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.339737] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66f075d-d85b-40d7-a8f8-b5fc0cf16cd5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.350617] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2e7eaa-b856-4b63-95e1-6246cd02f235 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.391386] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b5f56b-66f4-4e7a-a19d-b5cbf15ddcf8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.401671] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305cd1ce-134e-41d6-a5c3-9f81cc42cf92 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.421840] env[62109]: DEBUG nova.compute.provider_tree [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.441271] env[62109]: DEBUG nova.network.neutron [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updated VIF entry in instance network info cache for port 14e9f4a1-8980-4de2-88f7-dd0162687351. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 923.441737] env[62109]: DEBUG nova.network.neutron [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance_info_cache with network_info: [{"id": "14e9f4a1-8980-4de2-88f7-dd0162687351", "address": "fa:16:3e:3f:3e:8b", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14e9f4a1-89", "ovs_interfaceid": "14e9f4a1-8980-4de2-88f7-dd0162687351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.532830] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.533235] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.541874] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524c4404-8bfe-dbbc-aa44-923c6b4b6ea6, 'name': SearchDatastore_Task, 'duration_secs': 0.085271} completed successfully. 
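The instance_info_cache update above carries the full network_info entry for port 14e9f4a1-8980-4de2-88f7-dd0162687351. The sketch below pulls the commonly needed fields (MAC, fixed IPs, MTU, segmentation ID, device name) out of one such entry; the dict literal is abridged from the record above, and the summarize_vif helper is illustrative rather than part of Nova.

    vif = {   # abridged from the log record above
        "id": "14e9f4a1-8980-4de2-88f7-dd0162687351",
        "address": "fa:16:3e:3f:3e:8b",
        "network": {
            "subnets": [{"cidr": "192.168.128.0/28",
                         "ips": [{"address": "192.168.128.14", "type": "fixed"}]}],
            "meta": {"mtu": 8950},
        },
        "details": {"segmentation_id": 655},
        "devname": "tap14e9f4a1-89",
    }

    def summarize_vif(vif):
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        return {"port": vif["id"], "mac": vif["address"], "ips": ips,
                "mtu": vif["network"]["meta"]["mtu"],
                "segmentation_id": vif["details"]["segmentation_id"],
                "devname": vif["devname"]}

    print(summarize_vif(vif))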
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.544925] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.545351] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 39c17e34-c8c0-4a66-8d22-717efcb984bc/39c17e34-c8c0-4a66-8d22-717efcb984bc.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 923.546452] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-059e65ab-2fed-45ef-bfb7-351e4efaeed4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.561483] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 923.561483] env[62109]: value = "task-1116793" [ 923.561483] env[62109]: _type = "Task" [ 923.561483] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.572918] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116793, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.656274] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116792, 'name': CreateVM_Task} progress is 25%. 
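The CopyVirtualDisk_Task above (task-1116793) follows the image-cache layout used throughout this section: the source VMDK lives under devstack-image-cache_base keyed by image ID, and the destination is a per-instance folder named after the instance UUID. A small sketch of how those datastore path strings are assembled; the helper names are illustrative, only the path layout is taken from the log.

    def cached_image_path(datastore, image_id):
        # "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk"
        return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

    def instance_disk_path(datastore, instance_uuid):
        # "[datastore1] <uuid>/<uuid>.vmdk"
        return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    src = cached_image_path("datastore1", "6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8")
    dst = instance_disk_path("datastore1", "39c17e34-c8c0-4a66-8d22-717efcb984bc")
    print(f"Copying Virtual Disk {src} to {dst}")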
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.707685] env[62109]: DEBUG nova.network.neutron [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance_info_cache with network_info: [{"id": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "address": "fa:16:3e:83:01:bf", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4891b0-c5", "ovs_interfaceid": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.733928] env[62109]: DEBUG oslo_vmware.api [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5283ccba-d2b3-168a-03b7-3fc1f89f88e1, 'name': SearchDatastore_Task, 'duration_secs': 0.045038} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.734987] env[62109]: DEBUG oslo_concurrency.lockutils [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.835564] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116789, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.927981] env[62109]: DEBUG nova.scheduler.client.report [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 923.946047] env[62109]: DEBUG oslo_concurrency.lockutils [req-bb66ec9c-d9c3-4042-9fdd-bd1c859bdeb1 req-0419f5ff-2211-4257-afb2-09ba667ad9b8 service nova] Releasing lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.036148] env[62109]: DEBUG nova.compute.manager [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 924.074824] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116793, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.155118] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116792, 'name': CreateVM_Task} progress is 25%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.211980] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.335516] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116789, 'name': CopyVirtualDisk_Task} progress is 100%. 
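The report-client record above repeats the placement inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398. For each resource class, placement treats (total - reserved) * allocation_ratio as the schedulable capacity, with max_unit capping what a single allocation may request. A short worked sketch using the exact figures from the record (the printout format is illustrative):

    inventory = {   # copied from the log record above
        "VCPU": {"total": 48, "reserved": 0, "max_unit": 16, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "max_unit": 170, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: schedulable={capacity:g}, per-allocation cap={inv['max_unit']}")
    # VCPU: schedulable=192, per-allocation cap=16
    # MEMORY_MB: schedulable=196078, per-allocation cap=65530
    # DISK_GB: schedulable=400, per-allocation cap=170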
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.433243] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.131s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.435907] env[62109]: DEBUG oslo_concurrency.lockutils [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.702s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.436394] env[62109]: DEBUG nova.objects.instance [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lazy-loading 'resources' on Instance uuid 3e641c90-2358-4a1c-9af5-6ad96f722aba {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.471493] env[62109]: INFO nova.scheduler.client.report [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Deleted allocations for instance ac068268-1243-466e-8cd5-1ee2bc248ecd [ 924.563433] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.575205] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116793, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.655624] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116792, 'name': CreateVM_Task, 'duration_secs': 1.737789} completed successfully. 
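The lockutils records above report both how long a caller waited to acquire "compute_resources" (30.702s for the ServersTestMultiNic request) and how long the previous holder kept it (held 2.131s). A minimal stdlib sketch of that wait/held instrumentation pattern follows; it is not the oslo.concurrency implementation, and the timed_lock name and module-level lock registry are assumptions for illustration.

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}   # name -> threading.Lock (single-process illustration only)

    @contextmanager
    def timed_lock(name):
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        with lock:
            print(f'Lock "{name}" acquired :: waited {time.monotonic() - t0:.3f}s')
            t1 = time.monotonic()
            try:
                yield
            finally:
                print(f'Lock "{name}" released :: held {time.monotonic() - t1:.3f}s')

    with timed_lock("compute_resources"):
        pass   # critical section, e.g. updating resource usage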
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.655835] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 924.656629] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.656808] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.657200] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 924.657490] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a168a74d-d932-4e6f-b8c8-df71ce7eee12 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.667358] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 924.667358] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b7f24d-7bde-c2c5-d641-05655f0e0a80" [ 924.667358] env[62109]: _type = "Task" [ 924.667358] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.682177] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b7f24d-7bde-c2c5-d641-05655f0e0a80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.835786] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116789, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.523109} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.836139] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ee68a607-5d22-48e9-a6df-4918df414238/ee68a607-5d22-48e9-a6df-4918df414238.vmdk to [datastore1] 128ae6c9-1f82-4c67-83be-42cb554c2fd3/128ae6c9-1f82-4c67-83be-42cb554c2fd3.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 924.836985] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2860ca08-fcb4-484d-9524-e383f8e4781c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.860798] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 128ae6c9-1f82-4c67-83be-42cb554c2fd3/128ae6c9-1f82-4c67-83be-42cb554c2fd3.vmdk or device None with type streamOptimized {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 924.861281] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-785b1cb2-0603-4769-8472-536b7b82c292 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.882323] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 924.882323] env[62109]: value = "task-1116794" [ 924.882323] env[62109]: _type = "Task" [ 924.882323] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.890783] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116794, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.979533] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc5cc09c-dbc2-4733-b036-02b47aef916b tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "ac068268-1243-466e-8cd5-1ee2bc248ecd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.113s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.075474] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116793, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.170485} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.075938] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 39c17e34-c8c0-4a66-8d22-717efcb984bc/39c17e34-c8c0-4a66-8d22-717efcb984bc.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 925.076093] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 925.079190] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d1c1a67-f4e5-4172-9c1e-8cc08f58457c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.087055] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 925.087055] env[62109]: value = "task-1116795" [ 925.087055] env[62109]: _type = "Task" [ 925.087055] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.097676] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116795, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.180856] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b7f24d-7bde-c2c5-d641-05655f0e0a80, 'name': SearchDatastore_Task, 'duration_secs': 0.044669} completed successfully. 
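The ExtendVirtualDisk_Task above grows the freshly copied root disk to 1048576, and the m1.micro flavor recorded later in this section declares root_gb=1; the two numbers line up if the driver expresses the requested size in KiB (1 GiB = 1024 * 1024 KiB). That unit interpretation is inferred from the figures rather than stated in the log, so treat the check below as a sanity calculation only.

    root_gb = 1                       # m1.micro root_gb from the flavor record later in this section
    requested_kib = root_gb * 1024 * 1024
    assert requested_kib == 1048576   # matches "Extending root virtual disk to 1048576"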
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.181746] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.181746] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.181746] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.181746] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.181964] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.182701] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33866813-fe59-410e-9249-e41f87f2c05f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.192973] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.192973] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 925.193245] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1137532-8124-4c65-bebe-5e73eabde0ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.198963] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 925.198963] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525b938b-9c26-0fec-2a97-ead0102c0aca" [ 925.198963] env[62109]: _type = "Task" [ 925.198963] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.214724] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525b938b-9c26-0fec-2a97-ead0102c0aca, 'name': SearchDatastore_Task, 'duration_secs': 0.008898} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.217426] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c937bcb-c555-4caa-ab86-d487d2ea2b43 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.229691] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 925.229691] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52dbe36f-c102-0f8d-43db-422dfd60e53d" [ 925.229691] env[62109]: _type = "Task" [ 925.229691] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.242052] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52dbe36f-c102-0f8d-43db-422dfd60e53d, 'name': SearchDatastore_Task, 'duration_secs': 0.009264} completed successfully. 
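The MakeDirectory call and the "Folder [datastore1] devstack-image-cache_base created" message above are the create-if-missing step that runs before the cached image is reused, and it is harmless when the folder already exists. Below is a local-filesystem analogue of that idempotent pattern, purely illustrative: the real call goes through the vCenter FileManager seen in the log, not os.makedirs.

    import os

    def ensure_cache_dir(datastore_root, name="devstack-image-cache_base"):
        path = os.path.join(datastore_root, name)
        os.makedirs(path, exist_ok=True)   # no error if the directory already exists
        return path

    print(ensure_cache_dir("/tmp/datastore1"))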
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.242414] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.243192] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af/93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 925.243192] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56280188-3ddb-4847-9660-00c2b9583209 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.251156] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 925.251156] env[62109]: value = "task-1116796" [ 925.251156] env[62109]: _type = "Task" [ 925.251156] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.261403] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116796, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.333696] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9769c7-5d6a-4912-a6b0-bf811064e8dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.345682] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a0d116-2a20-4694-9f76-3dbb5ec0687c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.383980] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e161ea-9f97-40b7-b5ea-552b1a35118e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.399963] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116794, 'name': ReconfigVM_Task, 'duration_secs': 0.28151} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.403045] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2a6acb-96f2-47ad-931e-c7490cde28c9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.408023] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 128ae6c9-1f82-4c67-83be-42cb554c2fd3/128ae6c9-1f82-4c67-83be-42cb554c2fd3.vmdk or device None with type streamOptimized {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 925.408023] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c19528d-0a0d-4888-bba0-88b314862b6a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.421522] env[62109]: DEBUG nova.compute.provider_tree [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.424297] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 925.424297] env[62109]: value = "task-1116797" [ 925.424297] env[62109]: _type = "Task" [ 925.424297] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.435744] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116797, 'name': Rename_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.598172] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116795, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078341} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.598466] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 925.599380] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2df7045-ba77-44d2-a70b-3477b31aace5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.626034] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 39c17e34-c8c0-4a66-8d22-717efcb984bc/39c17e34-c8c0-4a66-8d22-717efcb984bc.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 925.626034] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a5bc7b1-3a84-412c-82df-ce12a39b7484 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.645969] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 925.645969] env[62109]: value = "task-1116798" [ 925.645969] env[62109]: _type = "Task" [ 925.645969] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.657008] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116798, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.729888] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379a53e0-a3b4-4796-af77-13bd85f9df1b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.754375] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance '8b63f9a1-5639-48b2-b0a9-30380835bef2' progress to 0 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 925.768175] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116796, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44295} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.768464] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af/93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 925.768720] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 925.769122] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c9f497f-ebd2-457c-b602-d1d8866ac64f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.778081] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 925.778081] env[62109]: value = "task-1116799" [ 925.778081] env[62109]: _type = "Task" [ 925.778081] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.790213] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116799, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.839107] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "5bea4229-6182-445e-b569-e7413ce92b93" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.842598] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "5bea4229-6182-445e-b569-e7413ce92b93" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.842598] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "5bea4229-6182-445e-b569-e7413ce92b93-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.842598] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "5bea4229-6182-445e-b569-e7413ce92b93-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.842598] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "5bea4229-6182-445e-b569-e7413ce92b93-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.842598] env[62109]: INFO nova.compute.manager [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Terminating instance [ 925.844379] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "refresh_cache-5bea4229-6182-445e-b569-e7413ce92b93" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.844547] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquired lock "refresh_cache-5bea4229-6182-445e-b569-e7413ce92b93" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.844720] env[62109]: DEBUG nova.network.neutron [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 
tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 925.925877] env[62109]: DEBUG nova.scheduler.client.report [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 925.943700] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116797, 'name': Rename_Task, 'duration_secs': 0.412773} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.944036] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 925.944285] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f67d7374-b005-45bc-bacd-c402c855dcb8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.952718] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 925.952718] env[62109]: value = "task-1116800" [ 925.952718] env[62109]: _type = "Task" [ 925.952718] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.961343] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116800, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.158771] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116798, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.264453] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 926.265444] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6dd822cb-9991-4510-a5d8-0e4e7011a340 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.273506] env[62109]: DEBUG oslo_vmware.api [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 926.273506] env[62109]: value = "task-1116801" [ 926.273506] env[62109]: _type = "Task" [ 926.273506] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.295120] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 926.296901] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance '8b63f9a1-5639-48b2-b0a9-30380835bef2' progress to 17 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 926.306319] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116799, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082701} completed successfully. 
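The power-off records above show the driver tolerating a guest that is already in the requested state ("VM already powered off") and simply moving on to the next migration step (progress 17) instead of failing. A schematic of that idempotent handling; the dict-based vm object and the power_off helper are illustrative, not the vm_util code.

    def power_off(vm):
        if vm["power_state"] == "poweredOff":
            print("VM already powered off")   # mirrors the debug message in the log
            return
        vm["power_state"] = "poweredOff"
        print("Powered off the VM")

    vm = {"power_state": "poweredOff"}
    power_off(vm)   # -> VM already powered off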
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.306648] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 926.309125] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6215187b-f7f2-4688-ba4b-3650e62cdf80 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.333356] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af/93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 926.333884] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e26c324-f0b9-47e3-902c-f3687f7d975d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.358501] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 926.358501] env[62109]: value = "task-1116802" [ 926.358501] env[62109]: _type = "Task" [ 926.358501] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.369721] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116802, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.373897] env[62109]: DEBUG nova.network.neutron [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 926.442023] env[62109]: DEBUG oslo_concurrency.lockutils [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.003s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.442023] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.667s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.442584] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.442584] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 926.442742] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.068s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.444840] env[62109]: INFO nova.compute.claims [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 926.448914] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6185583e-3c51-4ea5-8a2d-fbbb1a0c6c4b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.468325] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49b2879-1fab-4e13-96e7-c7ee3c9241ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.473074] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116800, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.487370] env[62109]: INFO nova.scheduler.client.report [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Deleted allocations for instance 3e641c90-2358-4a1c-9af5-6ad96f722aba [ 926.492561] env[62109]: DEBUG nova.network.neutron [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.495831] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328bef97-d6ec-45be-9d90-2781d768634a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.508026] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaaa1b15-c0c0-492a-86aa-6a3a5283b1f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.545317] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179448MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 926.545430] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.658829] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116798, 'name': ReconfigVM_Task, 'duration_secs': 0.56534} completed successfully. 
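The records above pair a successful claim for instance 5842e112-d3ef-4ce9-91cc-198e68d12422 with the resource tracker's view of the node (free_ram=179448MB, free_disk=170GB, free_vcpus=48). At its core a claim is an admission test of the requested resources against that free view. The sketch below uses the free figures from the log and, as a stand-in request, the m1.micro flavor that appears elsewhere in this section (1 vCPU, 256 MB, 1 GB root disk); the claim_fits helper is illustrative.

    free = {"vcpus": 48, "ram_mb": 179448, "disk_gb": 170}   # resource view from the log
    request = {"vcpus": 1, "ram_mb": 256, "disk_gb": 1}      # m1.micro flavor used as an example request

    def claim_fits(free, request):
        return all(request[key] <= free[key] for key in request)

    print("Claim successful" if claim_fits(free, request) else "Claim failed")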
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.658829] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 39c17e34-c8c0-4a66-8d22-717efcb984bc/39c17e34-c8c0-4a66-8d22-717efcb984bc.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.659372] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19096e8e-e3a6-4259-a7f8-f86eca6c1559 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.668095] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 926.668095] env[62109]: value = "task-1116803" [ 926.668095] env[62109]: _type = "Task" [ 926.668095] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.677280] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116803, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.802142] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 926.802463] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 926.802672] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.803152] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 926.803152] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.803264] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 926.803515] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 926.803716] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 926.803950] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 926.804347] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 926.804347] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 926.809891] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1451413f-cbce-4820-9654-1ece9097fdd9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.828755] env[62109]: DEBUG oslo_vmware.api [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 926.828755] env[62109]: value = "task-1116804" [ 926.828755] env[62109]: _type = "Task" [ 926.828755] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.838366] env[62109]: DEBUG oslo_vmware.api [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116804, 'name': ReconfigVM_Task} progress is 5%. 
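Annotation: the nova.virt.hardware entries above walk through CPU topology selection for the m1.micro flavor. With no flavor or image limits/preferences set (0:0:0), the limits default to 65536 sockets/cores/threads, and for a 1-vCPU guest the only exact factorisation is sockets=1, cores=1, threads=1, which is then the single "possible" and "sorted desired" topology. The snippet below is a minimal stand-alone sketch of that enumeration step only; it is not Nova's implementation (the real logic lives in nova/virt/hardware.py).

# Illustrative sketch: enumerate exact (sockets, cores, threads)
# factorisations of a vCPU count within the given limits, in the spirit of
# the _get_possible_cpu_topologies entries above.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

# For the 1-vCPU flavors in this run there is exactly one candidate,
# matching "Got 1 possible topologies ... cores=1,sockets=1,threads=1".
print(possible_topologies(1))   # [(1, 1, 1)]
print(possible_topologies(4))   # [(1, 1, 4), (1, 2, 2), (1, 4, 1), (2, 1, 2), ...]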
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.870533] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116802, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.976184] env[62109]: DEBUG oslo_vmware.api [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116800, 'name': PowerOnVM_Task, 'duration_secs': 0.935238} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.977263] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 926.977632] env[62109]: INFO nova.compute.manager [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Took 15.15 seconds to spawn the instance on the hypervisor. [ 926.977702] env[62109]: DEBUG nova.compute.manager [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 926.978916] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79c862d-c9fa-4aaa-aa4a-c05f9cec7b78 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.002480] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Releasing lock "refresh_cache-5bea4229-6182-445e-b569-e7413ce92b93" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.003743] env[62109]: DEBUG nova.compute.manager [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 927.004716] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 927.008021] env[62109]: DEBUG oslo_concurrency.lockutils [None req-66b563e1-29c2-4786-b0c3-b90764ec436c tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "3e641c90-2358-4a1c-9af5-6ad96f722aba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.677s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.008938] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70a9247-f3f4-48cf-ae36-b329c30bf55e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.018348] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 927.018700] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46968154-bb09-4d91-b81e-99d293772950 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.032333] env[62109]: DEBUG oslo_vmware.api [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 927.032333] env[62109]: value = "task-1116805" [ 927.032333] env[62109]: _type = "Task" [ 927.032333] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.042903] env[62109]: DEBUG oslo_vmware.api [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116805, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.177864] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116803, 'name': Rename_Task, 'duration_secs': 0.161065} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.178252] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 927.178513] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-28ec7279-6bfc-4669-9a94-f0bc1f50386a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.185499] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 927.185499] env[62109]: value = "task-1116806" [ 927.185499] env[62109]: _type = "Task" [ 927.185499] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.194648] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116806, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.343735] env[62109]: DEBUG oslo_vmware.api [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116804, 'name': ReconfigVM_Task, 'duration_secs': 0.178799} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.343735] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance '8b63f9a1-5639-48b2-b0a9-30380835bef2' progress to 33 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 927.375167] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116802, 'name': ReconfigVM_Task, 'duration_secs': 0.677071} completed successfully. 
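Annotation: every vCenter operation above (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, ...) follows the same pattern: the driver invokes the SOAP call, receives a task reference, and oslo_vmware's wait_for_task polls it, logging "progress is N%" until the task completes and a duration_secs is reported. The loop below is a schematic stand-in for that behaviour, not oslo.vmware's code; poll() is a hypothetical callable standing in for a TaskInfo lookup.

import time

class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""

def wait_for_task(poll, interval=0.5):
    # `poll` is a hypothetical callable returning a dict such as
    # {'state': 'running', 'progress': 14}, {'state': 'success'} or
    # {'state': 'error', 'message': '...'}; the real code inspects a
    # vSphere TaskInfo object instead.
    start = time.monotonic()
    while True:
        info = poll()
        if info['state'] == 'success':
            return time.monotonic() - start   # the duration_secs seen in the log
        if info['state'] == 'error':
            raise TaskFailed(info.get('message', 'task failed'))
        # a real implementation would log "progress is N%" here
        time.sleep(interval)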
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.375638] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af/93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 927.376738] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67a0e281-1052-444d-bbfd-64dbe6031d30 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.388391] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 927.388391] env[62109]: value = "task-1116807" [ 927.388391] env[62109]: _type = "Task" [ 927.388391] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.403268] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116807, 'name': Rename_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.509780] env[62109]: INFO nova.compute.manager [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Took 42.07 seconds to build instance. [ 927.550674] env[62109]: DEBUG oslo_vmware.api [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116805, 'name': PowerOffVM_Task, 'duration_secs': 0.150018} completed successfully. 
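Annotation: the attach_disk_to_vm entries above refer to each root disk by its datastore path, which has the fixed layout "[<datastore>] <instance_uuid>/<instance_uuid>.vmdk". A trivial, purely illustrative helper reproducing that layout:

def root_vmdk_path(datastore, instance_uuid):
    # Same layout as the paths logged above, e.g.
    # "[datastore1] 93b2c3f9-.../93b2c3f9-....vmdk"
    return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

print(root_vmdk_path("datastore1", "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af"))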
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.553636] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 927.553818] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 927.554310] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7f15601-bd28-4872-8b7e-aedc86205d29 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.592135] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 927.592135] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 927.592135] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Deleting the datastore file [datastore1] 5bea4229-6182-445e-b569-e7413ce92b93 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 927.592135] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f7b43ec-4efc-4092-b7bd-0f2273284bb0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.600572] env[62109]: DEBUG oslo_vmware.api [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for the task: (returnval){ [ 927.600572] env[62109]: value = "task-1116809" [ 927.600572] env[62109]: _type = "Task" [ 927.600572] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.614631] env[62109]: DEBUG oslo_vmware.api [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116809, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.697218] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116806, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.856065] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 927.856338] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 927.856504] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 927.856724] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 927.856875] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 927.857043] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 927.857253] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 927.857492] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 927.857669] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 
tempest-ServerActionsTestOtherB-1141754876-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 927.857866] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 927.858070] env[62109]: DEBUG nova.virt.hardware [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 927.867874] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Reconfiguring VM instance instance-00000031 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 927.867874] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef21d6d7-51b4-4554-9056-7a6d4a5a684c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.882045] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39bf65c6-b2be-4523-a787-273750ee29b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.894128] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374502d6-75a2-40ce-ba13-9f4a8cf320a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.898208] env[62109]: DEBUG oslo_vmware.api [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 927.898208] env[62109]: value = "task-1116810" [ 927.898208] env[62109]: _type = "Task" [ 927.898208] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.945730] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116807, 'name': Rename_Task, 'duration_secs': 0.245565} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.947465] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 927.948717] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0b5743-eb17-435a-8c7b-9e72bb2966c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.955730] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b905ec2-d438-4944-87b0-289d13af9020 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.957793] env[62109]: DEBUG oslo_vmware.api [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116810, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.965414] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcf4d10-8afc-4bd9-86d8-e51b4664eff5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.969529] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 927.969529] env[62109]: value = "task-1116811" [ 927.969529] env[62109]: _type = "Task" [ 927.969529] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.969920] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "128ae6c9-1f82-4c67-83be-42cb554c2fd3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.983555] env[62109]: DEBUG nova.compute.provider_tree [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.990994] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116811, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.014457] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0e1a66b6-2043-4539-ba4e-74a00909b355 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "128ae6c9-1f82-4c67-83be-42cb554c2fd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.589s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.014719] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "128ae6c9-1f82-4c67-83be-42cb554c2fd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.045s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.014968] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "128ae6c9-1f82-4c67-83be-42cb554c2fd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.015367] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "128ae6c9-1f82-4c67-83be-42cb554c2fd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.015464] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "128ae6c9-1f82-4c67-83be-42cb554c2fd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.017709] env[62109]: INFO nova.compute.manager [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Terminating instance [ 928.019580] env[62109]: DEBUG nova.compute.manager [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Start destroying the instance on the hypervisor. 
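Annotation: the oslo_concurrency.lockutils lines above follow a fixed three-step pattern per lock: 'Acquiring lock X by Y', then 'acquired ... :: waited N s', then '"released" ... :: held N s' (the 43.589s and 37.677s holds are the long build and terminate critical sections). The context manager below is a self-contained re-creation of that bookkeeping, shown only to make the pattern explicit; it is not the library's code.

import logging
import threading
import time
from contextlib import contextmanager

LOG = logging.getLogger(__name__)
_LOCKS = {}

@contextmanager
def timed_lock(name, owner):
    lock = _LOCKS.setdefault(name, threading.Lock())
    LOG.debug('Acquiring lock "%s" by "%s"', name, owner)
    t_wait = time.monotonic()
    with lock:
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  name, owner, time.monotonic() - t_wait)
        t_held = time.monotonic()
        try:
            yield
        finally:
            LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                      name, owner, time.monotonic() - t_held)

# usage: with timed_lock("compute_resources", "ResourceTracker.update_usage"): ...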
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 928.019770] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 928.020649] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b79248-9fc9-418a-963e-51de1c33ba06 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.029383] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 928.029657] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-420c63d4-1e3f-4b3c-b117-ba670e063b7a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.036528] env[62109]: DEBUG oslo_vmware.api [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 928.036528] env[62109]: value = "task-1116812" [ 928.036528] env[62109]: _type = "Task" [ 928.036528] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.044784] env[62109]: DEBUG oslo_vmware.api [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116812, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.112662] env[62109]: DEBUG oslo_vmware.api [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Task: {'id': task-1116809, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271407} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.113078] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 928.116021] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 928.116021] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 928.116021] env[62109]: INFO nova.compute.manager [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Took 1.11 seconds to destroy the instance on the hypervisor. [ 928.116021] env[62109]: DEBUG oslo.service.loopingcall [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.116021] env[62109]: DEBUG nova.compute.manager [-] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 928.116021] env[62109]: DEBUG nova.network.neutron [-] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 928.141627] env[62109]: DEBUG nova.network.neutron [-] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 928.198247] env[62109]: DEBUG oslo_vmware.api [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116806, 'name': PowerOnVM_Task, 'duration_secs': 0.643422} completed successfully. 
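Annotation: the destroy path for instance 5bea4229-6182-445e-b569-e7413ce92b93 traced above always runs in the same order: power the VM off, unregister it, delete its directory from the datastore, then deallocate its Neutron ports; the same sequence is under way for 128ae6c9-1f82-4c67-83be-42cb554c2fd3 in the surrounding entries. The toy sketch below only mirrors that ordering with in-memory fakes; none of these classes or names are Nova's.

class FakeVM:
    # Stand-in for the vCenter-side object; the real code drives
    # PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task instead.
    def __init__(self, uuid):
        self.uuid = uuid
        self.powered_on = True
        self.registered = True

def destroy(vm, datastore_dirs, ports_by_instance):
    # 1. power off   2. unregister
    # 3. delete [datastore1] <uuid>   4. deallocate network ports
    if vm.powered_on:
        vm.powered_on = False
    vm.registered = False
    datastore_dirs.discard(vm.uuid)
    ports_by_instance.pop(vm.uuid, None)

uuid = "5bea4229-6182-445e-b569-e7413ce92b93"
dirs, ports = {uuid}, {uuid: []}
vm = FakeVM(uuid)
destroy(vm, dirs, ports)
assert not dirs and not ports and not vm.powered_on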
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.198578] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 928.198840] env[62109]: INFO nova.compute.manager [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Took 10.87 seconds to spawn the instance on the hypervisor. [ 928.199069] env[62109]: DEBUG nova.compute.manager [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 928.199935] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7b6c76-8fb2-4ad0-99d8-a865deec9591 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.409838] env[62109]: DEBUG oslo_vmware.api [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116810, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.482280] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116811, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.487063] env[62109]: DEBUG nova.scheduler.client.report [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 928.546624] env[62109]: DEBUG oslo_vmware.api [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116812, 'name': PowerOffVM_Task, 'duration_secs': 0.258371} completed successfully. 
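Annotation: the inventory payload logged above for provider 574e9717-c25e-453d-8028-45d9e2f95398 is what the scheduler report client sends to placement. The capacity placement can hand out per resource class is roughly (total - reserved) * allocation_ratio, so this host advertises 192 VCPU, 196078 MB of RAM and 400 GB of disk (max_unit additionally caps any single allocation, e.g. 170 GB for DISK_GB). A quick back-of-the-envelope check, not placement's code:

def schedulable_capacity(inventory):
    # capacity placement can allocate: (total - reserved) * allocation_ratio
    return {rc: (inv['total'] - inv['reserved']) * inv['allocation_ratio']
            for rc, inv in inventory.items()}

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
print(schedulable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}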
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.546927] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 928.547098] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 928.547353] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab492251-29fe-4328-84ae-b7527aa304ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.620778] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 928.621027] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 928.621226] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleting the datastore file [datastore1] 128ae6c9-1f82-4c67-83be-42cb554c2fd3 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 928.621493] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0285b27-e317-427f-a8c5-8a925dc7f25f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.628111] env[62109]: DEBUG oslo_vmware.api [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 928.628111] env[62109]: value = "task-1116814" [ 928.628111] env[62109]: _type = "Task" [ 928.628111] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.636588] env[62109]: DEBUG oslo_vmware.api [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116814, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.642485] env[62109]: DEBUG nova.network.neutron [-] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.720132] env[62109]: INFO nova.compute.manager [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Took 40.38 seconds to build instance. [ 928.911295] env[62109]: DEBUG oslo_vmware.api [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116810, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.982579] env[62109]: DEBUG oslo_vmware.api [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116811, 'name': PowerOnVM_Task, 'duration_secs': 0.994325} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.982875] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 928.983100] env[62109]: INFO nova.compute.manager [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Took 9.15 seconds to spawn the instance on the hypervisor. [ 928.983291] env[62109]: DEBUG nova.compute.manager [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 928.984070] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73b8c77-098b-4edb-9aee-a179f3bd3d28 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.996163] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.551s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.996163] env[62109]: DEBUG nova.compute.manager [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 928.996617] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.638s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.997133] env[62109]: DEBUG nova.objects.instance [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Lazy-loading 'resources' on Instance uuid 6f31405e-a766-46da-8bf9-7be37a323bf3 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.137738] env[62109]: DEBUG oslo_vmware.api [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116814, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.3123} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.138011] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 929.138254] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 929.138506] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 929.138746] env[62109]: INFO nova.compute.manager [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Took 1.12 seconds to destroy the instance on the hypervisor. [ 929.139012] env[62109]: DEBUG oslo.service.loopingcall [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 929.139224] env[62109]: DEBUG nova.compute.manager [-] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 929.139320] env[62109]: DEBUG nova.network.neutron [-] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 929.144365] env[62109]: INFO nova.compute.manager [-] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Took 1.03 seconds to deallocate network for instance. [ 929.222568] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98c3a6ae-851c-4907-ab71-491ca2c53c92 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.892s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.410102] env[62109]: DEBUG oslo_vmware.api [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116810, 'name': ReconfigVM_Task, 'duration_secs': 1.283902} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.410102] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Reconfigured VM instance instance-00000031 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 929.411050] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b2a616-da73-4f0c-8c84-afc81a128c2b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.437466] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 8b63f9a1-5639-48b2-b0a9-30380835bef2/8b63f9a1-5639-48b2-b0a9-30380835bef2.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 929.437808] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2912392a-6081-4166-9036-6a01ae51ca1f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.457374] env[62109]: DEBUG oslo_vmware.api [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 929.457374] env[62109]: value = "task-1116815" [ 929.457374] env[62109]: _type = "Task" [ 929.457374] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.465703] env[62109]: DEBUG oslo_vmware.api [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116815, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.499402] env[62109]: DEBUG nova.compute.utils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 929.509472] env[62109]: DEBUG nova.compute.manager [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 929.509553] env[62109]: DEBUG nova.network.neutron [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 929.511625] env[62109]: INFO nova.compute.manager [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Took 40.49 seconds to build instance. 
[ 929.561379] env[62109]: DEBUG nova.policy [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '67a1245dbb50458ebda1a0a350def68b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ac84df552ee74053a00b8204aa781f3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 929.652355] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.775153] env[62109]: DEBUG nova.compute.manager [req-a2df82b3-8344-4bf9-ab85-f3c0e51b7cb3 req-17a1ad7d-e85d-404c-9085-5d7c36b51e2e service nova] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Received event network-vif-deleted-a374ac72-8bf4-4d62-9108-eb77cdb6e2d6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.775153] env[62109]: INFO nova.compute.manager [req-a2df82b3-8344-4bf9-ab85-f3c0e51b7cb3 req-17a1ad7d-e85d-404c-9085-5d7c36b51e2e service nova] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Neutron deleted interface a374ac72-8bf4-4d62-9108-eb77cdb6e2d6; detaching it from the instance and deleting it from the info cache [ 929.775153] env[62109]: DEBUG nova.network.neutron [req-a2df82b3-8344-4bf9-ab85-f3c0e51b7cb3 req-17a1ad7d-e85d-404c-9085-5d7c36b51e2e service nova] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.864562] env[62109]: DEBUG nova.network.neutron [-] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.908826] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c43ff3a-83d1-4837-9655-717d5318b380 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.917795] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27723162-0a0f-497e-a5bd-2a53d6ad0f26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.952627] env[62109]: DEBUG nova.network.neutron [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Successfully created port: b4e51bf8-f6dd-4890-81ac-da83edf6812c {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 929.955060] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8515c89-6ca0-4a65-8d9a-4a3d184223c8 {{(pid=62109) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.964401] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b68d5e9-423c-4c16-9871-a5084557eed1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.971064] env[62109]: DEBUG oslo_vmware.api [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116815, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.980494] env[62109]: DEBUG nova.compute.provider_tree [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.997762] env[62109]: DEBUG nova.compute.manager [req-af7a882d-b8a3-485c-ae36-b388830b5d8a req-4ba12a40-074a-4cda-b29f-8be91bdc5fec service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Received event network-changed-63549817-3bd1-441c-af9c-739682b35cf2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.997972] env[62109]: DEBUG nova.compute.manager [req-af7a882d-b8a3-485c-ae36-b388830b5d8a req-4ba12a40-074a-4cda-b29f-8be91bdc5fec service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Refreshing instance network info cache due to event network-changed-63549817-3bd1-441c-af9c-739682b35cf2. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 929.998207] env[62109]: DEBUG oslo_concurrency.lockutils [req-af7a882d-b8a3-485c-ae36-b388830b5d8a req-4ba12a40-074a-4cda-b29f-8be91bdc5fec service nova] Acquiring lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.998356] env[62109]: DEBUG oslo_concurrency.lockutils [req-af7a882d-b8a3-485c-ae36-b388830b5d8a req-4ba12a40-074a-4cda-b29f-8be91bdc5fec service nova] Acquired lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.998521] env[62109]: DEBUG nova.network.neutron [req-af7a882d-b8a3-485c-ae36-b388830b5d8a req-4ba12a40-074a-4cda-b29f-8be91bdc5fec service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Refreshing network info cache for port 63549817-3bd1-441c-af9c-739682b35cf2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 930.017303] env[62109]: DEBUG nova.compute.manager [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 930.020245] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ef116a0f-d264-4012-ad4f-994b7d7446f4 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.009s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.243651] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.243976] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.278634] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81be3f06-95f9-4b0d-b315-e34b876f57ee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.292487] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03f8800-2da4-4031-bea8-a0a31cbffc89 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.331482] env[62109]: DEBUG nova.compute.manager [req-a2df82b3-8344-4bf9-ab85-f3c0e51b7cb3 req-17a1ad7d-e85d-404c-9085-5d7c36b51e2e service nova] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Detach interface failed, port_id=a374ac72-8bf4-4d62-9108-eb77cdb6e2d6, reason: Instance 128ae6c9-1f82-4c67-83be-42cb554c2fd3 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 930.366939] env[62109]: INFO nova.compute.manager [-] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Took 1.23 seconds to deallocate network for instance. [ 930.473275] env[62109]: DEBUG oslo_vmware.api [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116815, 'name': ReconfigVM_Task, 'duration_secs': 0.574363} completed successfully. 
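The ReconfigVM_Task lines above ("progress is 14%" followed by "completed successfully" after ~0.57s) show the poll-until-done pattern used for every vCenter task in this log. Below is a small stand-alone sketch of that loop; fetch_task_info is a hypothetical stand-in for the real task lookup, not the oslo.vmware API.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    """Subset of the vCenter TaskInfo fields the poller cares about."""
    state: str          # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0   # percent, as in the "progress is 14%" log lines
    error: str | None = None

def wait_for_task(fetch_task_info, task_id: str, interval: float = 0.5):
    """Poll a task until it finishes, mimicking the log's progress messages.

    fetch_task_info(task_id) -> TaskInfo is an assumed callable standing in
    for the real vCenter lookup.
    """
    while True:
        info = fetch_task_info(task_id)
        if info.state == "success":
            print(f"Task {task_id} completed successfully.")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        print(f"Task {task_id} progress is {info.progress}%.")
        time.sleep(interval)

if __name__ == "__main__":
    # Fake task that finishes on the third poll.
    states = iter([TaskInfo("running", 14), TaskInfo("running", 60),
                   TaskInfo("success", 100)])
    wait_for_task(lambda _tid: next(states), "task-1116815", interval=0.01)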
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.473742] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 8b63f9a1-5639-48b2-b0a9-30380835bef2/8b63f9a1-5639-48b2-b0a9-30380835bef2.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 930.474155] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance '8b63f9a1-5639-48b2-b0a9-30380835bef2' progress to 50 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 930.483178] env[62109]: DEBUG nova.scheduler.client.report [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 930.752764] env[62109]: DEBUG nova.compute.manager [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 930.801012] env[62109]: DEBUG nova.network.neutron [req-af7a882d-b8a3-485c-ae36-b388830b5d8a req-4ba12a40-074a-4cda-b29f-8be91bdc5fec service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updated VIF entry in instance network info cache for port 63549817-3bd1-441c-af9c-739682b35cf2. 
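The inventory dict logged above for provider 574e9717 (VCPU, MEMORY_MB, DISK_GB, each with total, reserved and allocation_ratio) is what the resource tracker reports to Placement. Usable capacity per resource class works out to roughly (total - reserved) * allocation_ratio, with max_unit capping any single allocation; the arithmetic below just replays that formula on the numbers from the log (the formula is the standard Placement capacity calculation, stated here from memory).

# Replay of the capacity math for the inventory data in the log above.
INVENTORY = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 170},
}

def capacity(inv: dict) -> int:
    """Usable capacity: (total - reserved) scaled by the allocation ratio."""
    return int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])

for rc, inv in INVENTORY.items():
    print(f"{rc}: capacity={capacity(inv)}, largest single allocation={inv['max_unit']}")
# VCPU: 192 (48 cores oversubscribed 4x), MEMORY_MB: 196078, DISK_GB: 400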
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 930.801428] env[62109]: DEBUG nova.network.neutron [req-af7a882d-b8a3-485c-ae36-b388830b5d8a req-4ba12a40-074a-4cda-b29f-8be91bdc5fec service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updating instance_info_cache with network_info: [{"id": "63549817-3bd1-441c-af9c-739682b35cf2", "address": "fa:16:3e:39:dd:c1", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63549817-3b", "ovs_interfaceid": "63549817-3bd1-441c-af9c-739682b35cf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.875276] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.984250] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e4f51f-7245-42c5-bd48-7886284375cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.991896] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.992s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.010826] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.114s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.012067] env[62109]: DEBUG nova.objects.instance [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lazy-loading 'resources' on Instance uuid 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d {{(pid=62109) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 931.014718] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de2d9b7-7f82-42bd-9aaf-b95fd6974827 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.040820] env[62109]: DEBUG nova.compute.manager [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 931.043903] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance '8b63f9a1-5639-48b2-b0a9-30380835bef2' progress to 67 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 931.047690] env[62109]: INFO nova.scheduler.client.report [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Deleted allocations for instance 6f31405e-a766-46da-8bf9-7be37a323bf3 [ 931.086986] env[62109]: DEBUG nova.virt.hardware [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 931.088039] env[62109]: DEBUG nova.virt.hardware [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 931.088039] env[62109]: DEBUG nova.virt.hardware [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 931.088039] env[62109]: DEBUG nova.virt.hardware [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 931.088262] env[62109]: DEBUG nova.virt.hardware [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 
tempest-ServerActionsTestOtherA-1089353556-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 931.088346] env[62109]: DEBUG nova.virt.hardware [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 931.089025] env[62109]: DEBUG nova.virt.hardware [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 931.089254] env[62109]: DEBUG nova.virt.hardware [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 931.090030] env[62109]: DEBUG nova.virt.hardware [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 931.090030] env[62109]: DEBUG nova.virt.hardware [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 931.090159] env[62109]: DEBUG nova.virt.hardware [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 931.090971] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa7e5da-b59d-493b-82e0-e2fa50138789 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.103017] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a0911b-a67c-40ba-91f1-d534afcf5f2f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.277453] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.304248] env[62109]: DEBUG oslo_concurrency.lockutils [req-af7a882d-b8a3-485c-ae36-b388830b5d8a req-4ba12a40-074a-4cda-b29f-8be91bdc5fec service nova] Releasing lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.471913] env[62109]: DEBUG nova.network.neutron [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Successfully updated port: b4e51bf8-f6dd-4890-81ac-da83edf6812c {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 931.558447] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b9caef4b-ceea-4fec-bd64-8580a8fea529 tempest-ServerPasswordTestJSON-365268983 tempest-ServerPasswordTestJSON-365268983-project-member] Lock "6f31405e-a766-46da-8bf9-7be37a323bf3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.434s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.600912] env[62109]: DEBUG nova.network.neutron [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Port 7c4891b0-c525-4571-aa3b-47cc9a42d8ac binding to destination host cpu-1 is already ACTIVE {{(pid=62109) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 931.800573] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2be7b9-a414-4aff-a80c-569f29155785 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.810499] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6fef72e-bffb-44e0-a75d-ba55ed360513 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.817714] env[62109]: DEBUG nova.compute.manager [req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Received event network-vif-plugged-b4e51bf8-f6dd-4890-81ac-da83edf6812c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 931.818048] env[62109]: DEBUG oslo_concurrency.lockutils [req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] Acquiring lock "5842e112-d3ef-4ce9-91cc-198e68d12422-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.818390] env[62109]: DEBUG oslo_concurrency.lockutils [req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.818583] env[62109]: DEBUG oslo_concurrency.lockutils [req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.818845] env[62109]: DEBUG nova.compute.manager 
[req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] No waiting events found dispatching network-vif-plugged-b4e51bf8-f6dd-4890-81ac-da83edf6812c {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 931.819107] env[62109]: WARNING nova.compute.manager [req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Received unexpected event network-vif-plugged-b4e51bf8-f6dd-4890-81ac-da83edf6812c for instance with vm_state building and task_state spawning. [ 931.819362] env[62109]: DEBUG nova.compute.manager [req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Received event network-changed-b4e51bf8-f6dd-4890-81ac-da83edf6812c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 931.819592] env[62109]: DEBUG nova.compute.manager [req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Refreshing instance network info cache due to event network-changed-b4e51bf8-f6dd-4890-81ac-da83edf6812c. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 931.819852] env[62109]: DEBUG oslo_concurrency.lockutils [req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] Acquiring lock "refresh_cache-5842e112-d3ef-4ce9-91cc-198e68d12422" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.820065] env[62109]: DEBUG oslo_concurrency.lockutils [req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] Acquired lock "refresh_cache-5842e112-d3ef-4ce9-91cc-198e68d12422" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.820296] env[62109]: DEBUG nova.network.neutron [req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Refreshing network info cache for port b4e51bf8-f6dd-4890-81ac-da83edf6812c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 931.852566] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add76616-c481-4fe6-a357-bec66c25388a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.861139] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02be4dca-f978-48e8-bb4c-5cc8a6cc8803 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.875477] env[62109]: DEBUG nova.compute.provider_tree [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.973243] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] 
Acquiring lock "refresh_cache-5842e112-d3ef-4ce9-91cc-198e68d12422" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.085134] env[62109]: DEBUG nova.compute.manager [req-d1cb3be9-fc73-484b-ad2c-be672ea170fd req-f48c0875-ac8b-46a0-8395-9d12c7694bcb service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Received event network-changed-14e9f4a1-8980-4de2-88f7-dd0162687351 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 932.085361] env[62109]: DEBUG nova.compute.manager [req-d1cb3be9-fc73-484b-ad2c-be672ea170fd req-f48c0875-ac8b-46a0-8395-9d12c7694bcb service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Refreshing instance network info cache due to event network-changed-14e9f4a1-8980-4de2-88f7-dd0162687351. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 932.085573] env[62109]: DEBUG oslo_concurrency.lockutils [req-d1cb3be9-fc73-484b-ad2c-be672ea170fd req-f48c0875-ac8b-46a0-8395-9d12c7694bcb service nova] Acquiring lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.085718] env[62109]: DEBUG oslo_concurrency.lockutils [req-d1cb3be9-fc73-484b-ad2c-be672ea170fd req-f48c0875-ac8b-46a0-8395-9d12c7694bcb service nova] Acquired lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.086447] env[62109]: DEBUG nova.network.neutron [req-d1cb3be9-fc73-484b-ad2c-be672ea170fd req-f48c0875-ac8b-46a0-8395-9d12c7694bcb service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Refreshing network info cache for port 14e9f4a1-8980-4de2-88f7-dd0162687351 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 932.358992] env[62109]: DEBUG nova.network.neutron [req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Instance cache missing network info. 
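Just above, the network-vif-plugged-b4e51bf8 notification arrives before the driver has registered a waiter for it, so the manager logs "No waiting events found" and warns about an unexpected event while the instance is still building/spawning; the event is simply dropped. The sketch below shows the general registry-of-waiters idea behind those messages using threading.Event. The class and method names are illustrative, not Nova's actual InstanceEvents API.

import threading

class InstanceEventRegistry:
    """Toy registry of per-instance event waiters (illustrative only)."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid: str, event_name: str) -> threading.Event:
        """Register a waiter *before* plugging the VIF, so a later
        notification has something to wake up."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_event(self, instance_uuid: str, event_name: str) -> bool:
        """Handle an incoming notification. Returns True if a waiter was
        found and signalled, False for an 'unexpected' event."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print(f"No waiting events found dispatching {event_name}")
            return False
        ev.set()
        return True

if __name__ == "__main__":
    reg = InstanceEventRegistry()
    uuid = "5842e112-d3ef-4ce9-91cc-198e68d12422"
    # Event arrives before anyone registered a waiter -> "unexpected".
    reg.pop_event(uuid, "network-vif-plugged-b4e51bf8")
    # Normal path: register first, then the notification releases the waiter.
    waiter = reg.prepare_for_event(uuid, "network-vif-plugged-b4e51bf8")
    reg.pop_event(uuid, "network-vif-plugged-b4e51bf8")
    print("waiter signalled:", waiter.is_set())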
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 932.379426] env[62109]: DEBUG nova.scheduler.client.report [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 932.453150] env[62109]: DEBUG nova.network.neutron [req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.627656] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "8b63f9a1-5639-48b2-b0a9-30380835bef2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.627928] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.628150] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.831246] env[62109]: DEBUG nova.network.neutron [req-d1cb3be9-fc73-484b-ad2c-be672ea170fd req-f48c0875-ac8b-46a0-8395-9d12c7694bcb service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updated VIF entry in instance network info cache for port 14e9f4a1-8980-4de2-88f7-dd0162687351. 
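The instance_info_cache dumps in this stretch (port 63549817 earlier, port 14e9f4a1 just below) carry the full Neutron view of a VIF: fixed and floating IPs, the integration bridge, the NSX logical switch and segmentation ID, and the tap device name. A small helper like the sketch below is enough to pull the commonly needed fields out of one entry; the dict layout is copied from the log, the helper itself is illustrative.

# Extract the commonly used fields from one network_info entry, using the
# structure visible in the cache dumps in this log (illustrative helper).
VIF_ENTRY = {
    "id": "14e9f4a1-8980-4de2-88f7-dd0162687351",
    "address": "fa:16:3e:3f:3e:8b",
    "devname": "tap14e9f4a1-89",
    "details": {"nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616",
                "segmentation_id": 655},
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.14",
                     "floating_ips": [{"address": "10.180.180.189"}]}],
        }],
    },
}

def summarize_vif(vif: dict) -> dict:
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(fip["address"] for fip in ip.get("floating_ips", []))
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "bridge": vif["network"]["bridge"],
        "devname": vif["devname"],
        "segmentation_id": vif["details"].get("segmentation_id"),
        "fixed_ips": fixed,
        "floating_ips": floating,
    }

if __name__ == "__main__":
    print(summarize_vif(VIF_ENTRY))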
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 932.831622] env[62109]: DEBUG nova.network.neutron [req-d1cb3be9-fc73-484b-ad2c-be672ea170fd req-f48c0875-ac8b-46a0-8395-9d12c7694bcb service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance_info_cache with network_info: [{"id": "14e9f4a1-8980-4de2-88f7-dd0162687351", "address": "fa:16:3e:3f:3e:8b", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14e9f4a1-89", "ovs_interfaceid": "14e9f4a1-8980-4de2-88f7-dd0162687351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.884929] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.874s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.888543] env[62109]: DEBUG oslo_concurrency.lockutils [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.964s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.888816] env[62109]: DEBUG nova.objects.instance [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lazy-loading 'resources' on Instance uuid 028300fd-f9f8-4606-a39e-53582f830eeb {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.911906] env[62109]: INFO nova.scheduler.client.report [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleted allocations for instance 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d [ 932.956944] env[62109]: DEBUG oslo_concurrency.lockutils [req-bc163d01-ac7b-4bad-89fa-a19226f30c3a req-ba996c39-a5aa-4e10-b41c-fa3bf4c2c211 service nova] Releasing lock "refresh_cache-5842e112-d3ef-4ce9-91cc-198e68d12422" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.958118] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "refresh_cache-5842e112-d3ef-4ce9-91cc-198e68d12422" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.960064] env[62109]: DEBUG nova.network.neutron [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 933.337860] env[62109]: DEBUG oslo_concurrency.lockutils [req-d1cb3be9-fc73-484b-ad2c-be672ea170fd req-f48c0875-ac8b-46a0-8395-9d12c7694bcb service nova] Releasing lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.419516] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e5c4505c-202d-4bf3-b332-730e6aca5d4c tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "448371eb-c1dd-4d7b-b946-aaf6c3a3a36d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.039s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.501046] env[62109]: DEBUG nova.network.neutron [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 933.675538] env[62109]: DEBUG nova.network.neutron [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Updating instance_info_cache with network_info: [{"id": "b4e51bf8-f6dd-4890-81ac-da83edf6812c", "address": "fa:16:3e:1d:b2:83", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4e51bf8-f6", "ovs_interfaceid": "b4e51bf8-f6dd-4890-81ac-da83edf6812c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.675538] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32c21a11-9291-4843-8aae-9ef3fed4751d 
tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.675538] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.675538] env[62109]: DEBUG nova.network.neutron [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 933.753272] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903931de-1912-48e3-9680-cfec9b781175 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.761325] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3224eb6c-9345-483a-9bc6-22beb7fe2f41 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.793267] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2cf321-911e-4244-92a3-88f29889361a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.800875] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2066f431-21ee-4707-b91a-c4d9feec31a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.813926] env[62109]: DEBUG nova.compute.provider_tree [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.177827] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "refresh_cache-5842e112-d3ef-4ce9-91cc-198e68d12422" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.178279] env[62109]: DEBUG nova.compute.manager [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Instance network_info: |[{"id": "b4e51bf8-f6dd-4890-81ac-da83edf6812c", "address": "fa:16:3e:1d:b2:83", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4e51bf8-f6", "ovs_interfaceid": "b4e51bf8-f6dd-4890-81ac-da83edf6812c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 934.180934] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:b2:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4e51bf8-f6dd-4890-81ac-da83edf6812c', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 934.188296] env[62109]: DEBUG oslo.service.loopingcall [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 934.188536] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 934.189142] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72ef299b-8c4b-49a4-8c43-da78dbcdc1a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.210516] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 934.210516] env[62109]: value = "task-1116816" [ 934.210516] env[62109]: _type = "Task" [ 934.210516] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.220506] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116816, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.317246] env[62109]: DEBUG nova.scheduler.client.report [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 934.328359] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "35a13db2-f645-4634-86e0-7e9a6a24fc66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.328620] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "35a13db2-f645-4634-86e0-7e9a6a24fc66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.435817] env[62109]: DEBUG nova.network.neutron [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance_info_cache with network_info: [{"id": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "address": "fa:16:3e:83:01:bf", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4891b0-c5", "ovs_interfaceid": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.720013] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116816, 'name': CreateVM_Task, 
'duration_secs': 0.323234} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.720201] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 934.720866] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.721051] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.721381] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 934.721630] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3627aca-9e6e-423b-a636-248419b824ef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.726932] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 934.726932] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5227fcfc-a744-e7e7-0fbd-ce02b840155f" [ 934.726932] env[62109]: _type = "Task" [ 934.726932] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.733943] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5227fcfc-a744-e7e7-0fbd-ce02b840155f, 'name': SearchDatastore_Task} progress is 0%. 
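A few entries back, vmops logged the "Instance VIF info" it derived from the Neutron port (network_name 'br-int', an OpaqueNetwork reference to the NSX logical switch, vif_model 'vmxnet3') and fed it to the CreateVM_Task that completes here in about 0.32s. The sketch below reproduces that mapping as a plain function, assuming only the fields visible in the log matter; it illustrates the translation rather than the driver's actual build_virtual_machine code.

def neutron_vif_to_vmware_vif_info(vif: dict, vif_model: str = "vmxnet3") -> dict:
    """Map a Neutron network_info entry onto the VIF-info shape seen in the
    log ('network_name', 'mac_address', 'network_ref', 'iface_id',
    'vif_model'). Illustrative only; field choices mirror the logged output."""
    return {
        "network_name": vif["network"]["bridge"],            # e.g. 'br-int'
        "mac_address": vif["address"],                        # e.g. 'fa:16:3e:1d:b2:83'
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }

if __name__ == "__main__":
    vif = {
        "id": "b4e51bf8-f6dd-4890-81ac-da83edf6812c",
        "address": "fa:16:3e:1d:b2:83",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d"},
    }
    print(neutron_vif_to_vmware_vif_info(vif))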
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.825772] env[62109]: DEBUG oslo_concurrency.lockutils [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.936s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.827834] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.538s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.829028] env[62109]: INFO nova.compute.claims [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 934.831634] env[62109]: DEBUG nova.compute.manager [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 934.851292] env[62109]: INFO nova.scheduler.client.report [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Deleted allocations for instance 028300fd-f9f8-4606-a39e-53582f830eeb [ 934.942135] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.237522] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5227fcfc-a744-e7e7-0fbd-ce02b840155f, 'name': SearchDatastore_Task, 'duration_secs': 0.046648} completed successfully. 
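The compute_resources lock lines here ("released ... held 1.936s", "acquired ... waited 23.538s") follow the acquiring / acquired-after-waiting / released-after-holding pattern that lockutils prints throughout this log. Below is a toy reimplementation of that pattern around a plain threading.Lock, just to make the three timestamps explicit; it is not oslo.concurrency itself.

import threading
import time
from contextlib import contextmanager

_LOCKS: dict[str, threading.Lock] = {}
_REGISTRY_LOCK = threading.Lock()

@contextmanager
def named_lock(name: str, caller: str):
    """Toy version of the lockutils 'acquiring / acquired (waited Ns) /
    released (held Ns)' messages seen in this log."""
    with _REGISTRY_LOCK:
        lock = _LOCKS.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

if __name__ == "__main__":
    with named_lock("compute_resources", "ResourceTracker.update_usage"):
        time.sleep(0.05)  # stand-in for the resource-tracker work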
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.237853] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.238111] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 935.238355] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.238554] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.238676] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 935.238986] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df4937cf-9ebf-44c5-b766-38056d0e4063 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.252188] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 935.252381] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Folder [datastore2] devstack-image-cache_base created. 
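The datastore locks taken and released around here all hang off the shared image cache: "[datastore2] devstack-image-cache_base/<image-id>" for the cached image folder and ".../<image-id>/<image-id>.vmdk" for the disk inside it, so concurrent spawns of the same image serialize on the cached copy instead of each fetching it. A small sketch of how those path/lock strings are put together, using the image UUID from the log; the helper names are illustrative.

import posixpath

def datastore_path(datastore: str, *parts: str) -> str:
    """Render a datastore path the way it appears in the log:
    '[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk'."""
    return f"[{datastore}] {posixpath.join(*parts)}"

def image_cache_lock_names(datastore: str, cache_dir: str, image_id: str):
    """Lock names used to serialize access to a cached image (illustrative)."""
    folder = datastore_path(datastore, cache_dir, image_id)
    vmdk = datastore_path(datastore, cache_dir, image_id, f"{image_id}.vmdk")
    return folder, vmdk

if __name__ == "__main__":
    folder_lock, vmdk_lock = image_cache_lock_names(
        "datastore2", "devstack-image-cache_base",
        "6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8")
    print(folder_lock)
    print(vmdk_lock)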
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 935.253123] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16a3e6e7-a216-4ed3-a2a3-0f324107d92d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.258523] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 935.258523] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b8a845-2ec0-7c8d-6d5a-fd7146e02402" [ 935.258523] env[62109]: _type = "Task" [ 935.258523] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.265963] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b8a845-2ec0-7c8d-6d5a-fd7146e02402, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.351189] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.358652] env[62109]: DEBUG oslo_concurrency.lockutils [None req-432af254-289b-48b0-9012-3d58f39d4de8 tempest-VolumesAdminNegativeTest-40114712 tempest-VolumesAdminNegativeTest-40114712-project-member] Lock "028300fd-f9f8-4606-a39e-53582f830eeb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.701s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.467253] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38bb332b-9d4a-4bb0-b539-80905180dcb3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.488508] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148340ca-fba6-4b49-82d1-b7f11b38d46a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.496606] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance '8b63f9a1-5639-48b2-b0a9-30380835bef2' progress to 83 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 935.768996] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b8a845-2ec0-7c8d-6d5a-fd7146e02402, 'name': SearchDatastore_Task, 'duration_secs': 0.049643} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.769915] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6fd9c09-7be3-4916-85d9-83a4ac1231cc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.775462] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 935.775462] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52753682-07c3-fb6c-254b-f3317877ff14" [ 935.775462] env[62109]: _type = "Task" [ 935.775462] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.782976] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52753682-07c3-fb6c-254b-f3317877ff14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.003037] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-32c21a11-9291-4843-8aae-9ef3fed4751d tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance '8b63f9a1-5639-48b2-b0a9-30380835bef2' progress to 100 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 936.238918] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66934a5-24ab-4d3a-948e-51763f290f3c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.246811] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9038c46-5d5d-4428-b488-40ff10ea9700 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.276522] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb17f6d-77df-4fe1-a85c-dbc6cda196bc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.290011] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d851b024-9b51-4f3b-818d-f6cbc72a14b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.293792] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52753682-07c3-fb6c-254b-f3317877ff14, 'name': SearchDatastore_Task, 'duration_secs': 0.031677} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.294077] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.294344] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 5842e112-d3ef-4ce9-91cc-198e68d12422/5842e112-d3ef-4ce9-91cc-198e68d12422.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 936.294952] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9244739-35f5-4750-b7ae-49e9dc005bfc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.305871] env[62109]: DEBUG nova.compute.provider_tree [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.312371] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 936.312371] env[62109]: value = "task-1116817" [ 936.312371] env[62109]: _type = "Task" [ 936.312371] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.321149] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116817, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.809902] env[62109]: DEBUG nova.scheduler.client.report [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 936.824993] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116817, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.318804] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.491s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.319359] env[62109]: DEBUG nova.compute.manager [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 937.321914] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.260s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.323346] env[62109]: INFO nova.compute.claims [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 937.331621] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116817, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617219} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.331863] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 5842e112-d3ef-4ce9-91cc-198e68d12422/5842e112-d3ef-4ce9-91cc-198e68d12422.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 937.332101] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 937.332351] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-147139fa-b0a8-40a1-9dfa-5434c2fd4427 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.339198] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 937.339198] env[62109]: value = "task-1116818" [ 937.339198] env[62109]: _type = "Task" [ 937.339198] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.346823] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116818, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.831407] env[62109]: DEBUG nova.compute.utils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 937.833041] env[62109]: DEBUG nova.compute.manager [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Not allocating networking since 'none' was specified. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 937.852579] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116818, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067349} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.853177] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 937.853790] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f872c0d7-f375-43fe-9ef4-80b4b9e4e556 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.878773] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 5842e112-d3ef-4ce9-91cc-198e68d12422/5842e112-d3ef-4ce9-91cc-198e68d12422.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 937.879263] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4efa1cc2-b5ec-42f2-8a59-7f17a4a3bfaf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.899901] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 937.899901] env[62109]: value = "task-1116819" [ 937.899901] env[62109]: _type = "Task" [ 937.899901] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.911921] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116819, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.337721] env[62109]: DEBUG nova.compute.manager [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 938.418290] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116819, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.754882] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94cb4f26-7a64-4311-b79f-316ca8dc1d1b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.773091] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149a3aa2-24c3-4d58-b4ca-313eee80cb16 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.825950] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec6c9f4-f860-4dc8-8516-ba7f4efe2846 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.839052] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de72ac61-95c7-46fa-9266-9d7ad9591a9e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.869351] env[62109]: DEBUG nova.compute.provider_tree [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 938.911882] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116819, 'name': ReconfigVM_Task, 'duration_secs': 0.602039} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.912191] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 5842e112-d3ef-4ce9-91cc-198e68d12422/5842e112-d3ef-4ce9-91cc-198e68d12422.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 938.913021] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dcfc83a4-a15f-42f6-988c-2920da6fdd1c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.919917] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 938.919917] env[62109]: value = "task-1116820" [ 938.919917] env[62109]: _type = "Task" [ 938.919917] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.927425] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116820, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.152273] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "8b63f9a1-5639-48b2-b0a9-30380835bef2" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.152273] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.152273] env[62109]: DEBUG nova.compute.manager [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Going to confirm migration 3 {{(pid=62109) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 939.353664] env[62109]: DEBUG nova.compute.manager [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 939.380256] env[62109]: DEBUG nova.virt.hardware [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=<?>,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-03T07:50:17Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 939.380487] env[62109]: DEBUG nova.virt.hardware [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 939.380649] env[62109]: DEBUG nova.virt.hardware [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 939.380837] env[62109]: DEBUG nova.virt.hardware [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 939.380987] env[62109]: DEBUG nova.virt.hardware [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 939.381257] env[62109]: DEBUG nova.virt.hardware [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 939.381410] env[62109]: DEBUG nova.virt.hardware [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 939.381538] env[62109]: DEBUG nova.virt.hardware [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 939.381679] env[62109]: DEBUG nova.virt.hardware [None req-b74499c9-bcf2-46d0-87da-77f127feed17 
tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 939.381844] env[62109]: DEBUG nova.virt.hardware [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 939.382033] env[62109]: DEBUG nova.virt.hardware [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 939.382963] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02d0f78-65a5-41cf-b445-f806e486dd1c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.390844] env[62109]: ERROR nova.scheduler.client.report [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [req-e0b37d92-506a-4009-87fd-34155da6504c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e0b37d92-506a-4009-87fd-34155da6504c"}]} [ 939.392209] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df657af-e2c5-46d2-8509-0fc5d6285ca5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.407621] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 939.413381] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Creating folder: Project (2e0c6c44a8f34a879305f910e62720da). Parent ref: group-v244329. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 939.414401] env[62109]: DEBUG nova.scheduler.client.report [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 939.416150] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7cf4405-609f-465d-98ed-796c9ca10d80 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.425932] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Created folder: Project (2e0c6c44a8f34a879305f910e62720da) in parent group-v244329. [ 939.426145] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Creating folder: Instances. Parent ref: group-v244492. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 939.429300] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9413c06-2c9c-4e04-8977-56c0147c38ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.430780] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116820, 'name': Rename_Task, 'duration_secs': 0.144086} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.431966] env[62109]: DEBUG nova.scheduler.client.report [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 939.431966] env[62109]: DEBUG nova.compute.provider_tree [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 939.433710] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 939.434808] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08deadc3-7939-4b5c-818f-994e623fadfd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.440188] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 939.440188] env[62109]: value = "task-1116823" [ 939.440188] env[62109]: _type = "Task" [ 939.440188] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.444938] env[62109]: DEBUG nova.scheduler.client.report [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 939.446762] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Created folder: Instances in parent group-v244492. 
[ 939.447041] env[62109]: DEBUG oslo.service.loopingcall [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.447774] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 939.447897] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b25e6f2-4a12-4532-9722-b4a05315128c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.463009] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116823, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.465815] env[62109]: DEBUG nova.scheduler.client.report [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 939.469181] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 939.469181] env[62109]: value = "task-1116824" [ 939.469181] env[62109]: _type = "Task" [ 939.469181] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.477224] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116824, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.735586] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.736192] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.736192] env[62109]: DEBUG nova.network.neutron [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 939.736192] env[62109]: DEBUG nova.objects.instance [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'info_cache' on Instance uuid 8b63f9a1-5639-48b2-b0a9-30380835bef2 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 939.776168] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633cf214-5646-43ab-b2a6-33b7f7c71434 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.783925] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab12a618-bae2-4859-93ee-1ba4693da389 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.814730] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a7c311-49f9-4181-81b4-dfd71bf37de8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.822475] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b64a57-53ea-4bd9-a19f-8dceabd569c5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.835863] env[62109]: DEBUG nova.compute.provider_tree [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 939.949552] env[62109]: DEBUG oslo_vmware.api [None req-e8385b28-9d48-4aca-8607-befd523ab2ce 
tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116823, 'name': PowerOnVM_Task, 'duration_secs': 0.469711} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.949852] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 939.950100] env[62109]: INFO nova.compute.manager [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Took 8.91 seconds to spawn the instance on the hypervisor. [ 939.950301] env[62109]: DEBUG nova.compute.manager [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 939.951070] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e97ae0-3c23-4cde-832c-feb6f2dce701 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.978158] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116824, 'name': CreateVM_Task, 'duration_secs': 0.24695} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.978331] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 939.978753] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.978950] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.979335] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 939.979547] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04aeef4a-c54a-4ee1-a15a-0f74c1629e4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.983914] 
env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 939.983914] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52280e6a-e56a-b570-059e-98f715f67901" [ 939.983914] env[62109]: _type = "Task" [ 939.983914] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.991554] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52280e6a-e56a-b570-059e-98f715f67901, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.368342] env[62109]: DEBUG nova.scheduler.client.report [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 109 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 940.368342] env[62109]: DEBUG nova.compute.provider_tree [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 109 to 110 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 940.368750] env[62109]: DEBUG nova.compute.provider_tree [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 940.469094] env[62109]: INFO nova.compute.manager [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Took 46.11 seconds to build instance. 
[ 940.496723] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52280e6a-e56a-b570-059e-98f715f67901, 'name': SearchDatastore_Task, 'duration_secs': 0.013124} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.497083] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.497318] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 940.497550] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.497699] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.497878] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 940.498256] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a8e5c00-c3f0-436a-9114-b023ec45586b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.506526] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 940.506726] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 940.507464] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a243f51-0b16-4fdd-8c02-2b36876e3b94 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.512403] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 940.512403] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525851b2-d4de-98e4-3eb0-9049c3cc788f" [ 940.512403] env[62109]: _type = "Task" [ 940.512403] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.520449] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525851b2-d4de-98e4-3eb0-9049c3cc788f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.792200] env[62109]: DEBUG nova.compute.manager [req-ec43752b-875d-4b3b-bbb7-ece0fc93a2b6 req-3c83fb82-5b57-435b-bc49-68e946f7c7b8 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Received event network-changed-b4e51bf8-f6dd-4890-81ac-da83edf6812c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.792393] env[62109]: DEBUG nova.compute.manager [req-ec43752b-875d-4b3b-bbb7-ece0fc93a2b6 req-3c83fb82-5b57-435b-bc49-68e946f7c7b8 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Refreshing instance network info cache due to event network-changed-b4e51bf8-f6dd-4890-81ac-da83edf6812c. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 940.792607] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec43752b-875d-4b3b-bbb7-ece0fc93a2b6 req-3c83fb82-5b57-435b-bc49-68e946f7c7b8 service nova] Acquiring lock "refresh_cache-5842e112-d3ef-4ce9-91cc-198e68d12422" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.792746] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec43752b-875d-4b3b-bbb7-ece0fc93a2b6 req-3c83fb82-5b57-435b-bc49-68e946f7c7b8 service nova] Acquired lock "refresh_cache-5842e112-d3ef-4ce9-91cc-198e68d12422" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.793464] env[62109]: DEBUG nova.network.neutron [req-ec43752b-875d-4b3b-bbb7-ece0fc93a2b6 req-3c83fb82-5b57-435b-bc49-68e946f7c7b8 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Refreshing network info cache for port b4e51bf8-f6dd-4890-81ac-da83edf6812c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 940.874066] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.552s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.874554] env[62109]: DEBUG nova.compute.manager [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 940.877179] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.805s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.877409] env[62109]: DEBUG nova.objects.instance [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lazy-loading 'resources' on Instance uuid 5c7dbe04-5027-49cd-a478-79046fee1f16 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 940.970806] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e8385b28-9d48-4aca-8607-befd523ab2ce tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 47.624s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.973328] env[62109]: DEBUG nova.network.neutron [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance_info_cache with network_info: [{"id": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "address": "fa:16:3e:83:01:bf", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4891b0-c5", "ovs_interfaceid": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.023133] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525851b2-d4de-98e4-3eb0-9049c3cc788f, 'name': SearchDatastore_Task, 'duration_secs': 0.008373} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.024042] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-706c4909-8189-4309-afc6-9c901c1e097d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.029612] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 941.029612] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5216589d-944e-f76f-5271-525f080021a3" [ 941.029612] env[62109]: _type = "Task" [ 941.029612] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.038147] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5216589d-944e-f76f-5271-525f080021a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.381053] env[62109]: DEBUG nova.compute.utils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 941.385972] env[62109]: DEBUG nova.compute.manager [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 941.385972] env[62109]: DEBUG nova.network.neutron [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 941.448616] env[62109]: DEBUG nova.policy [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a65ab55ee66140f2a825e4347258d12d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57a1783401e34096b84023fc70da3840', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 941.476998] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.477272] env[62109]: DEBUG nova.objects.instance [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'migration_context' on Instance uuid 8b63f9a1-5639-48b2-b0a9-30380835bef2 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 941.539095] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5216589d-944e-f76f-5271-525f080021a3, 'name': SearchDatastore_Task, 'duration_secs': 0.009284} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.541479] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.541747] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] e7e232c4-a2cb-44eb-8ee3-11fc12ee152a/e7e232c4-a2cb-44eb-8ee3-11fc12ee152a.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 941.542417] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89faa814-24f7-4653-acc2-458b99d575dd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.549164] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 941.549164] env[62109]: value = "task-1116825" [ 941.549164] env[62109]: _type = "Task" [ 941.549164] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.563241] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116825, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.619384] env[62109]: DEBUG nova.network.neutron [req-ec43752b-875d-4b3b-bbb7-ece0fc93a2b6 req-3c83fb82-5b57-435b-bc49-68e946f7c7b8 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Updated VIF entry in instance network info cache for port b4e51bf8-f6dd-4890-81ac-da83edf6812c. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 941.619905] env[62109]: DEBUG nova.network.neutron [req-ec43752b-875d-4b3b-bbb7-ece0fc93a2b6 req-3c83fb82-5b57-435b-bc49-68e946f7c7b8 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Updating instance_info_cache with network_info: [{"id": "b4e51bf8-f6dd-4890-81ac-da83edf6812c", "address": "fa:16:3e:1d:b2:83", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4e51bf8-f6", "ovs_interfaceid": "b4e51bf8-f6dd-4890-81ac-da83edf6812c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.773239] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6d12a9-99c1-447d-b2ff-9d0618fcdf8e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.782087] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de5ae01-0ab3-4204-ad62-e3c6f1f229f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.825853] env[62109]: DEBUG nova.network.neutron [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Successfully created port: f9409e40-51f1-46fc-b21a-1c43e176574f {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 941.830450] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21d5379-8744-4f06-967c-44813cf2c23b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.840435] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37649cd3-0d5c-4f5c-b13b-aef57fece3d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.855835] env[62109]: DEBUG nova.compute.provider_tree [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} 
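The SearchDatastore_Task and CopyVirtualDisk_Task entries above follow oslo.vmware's poll-until-terminal pattern: the driver invokes a vSphere call, gets a task reference back, and then polls it until it reports success or error, which is where the "Waiting for the task", "progress is N%" and "completed successfully" lines come from. A minimal, self-contained sketch of that loop; the get_task_info callable and the state strings are assumptions for illustration, not the real oslo_vmware internals.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll a vCenter task until it reaches a terminal state.
        # get_task_info is assumed to return a dict such as
        # {"state": "running" | "success" | "error", "progress": int, "result": ...}.
        while True:
            info = get_task_info()
            if info["state"] == "success":      # logged as "completed successfully"
                return info.get("result")
            if info["state"] == "error":
                raise RuntimeError("vCenter task failed: %s" % info.get("error"))
            # intermediate polls are what produce the "progress is N%" DEBUG lines
            time.sleep(poll_interval)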
[ 941.886212] env[62109]: DEBUG nova.compute.manager [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 941.984045] env[62109]: DEBUG nova.objects.base [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Object Instance<8b63f9a1-5639-48b2-b0a9-30380835bef2> lazy-loaded attributes: info_cache,migration_context {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 941.984382] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e643b912-c61a-4b24-87ad-6fe80c5b2b03 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.004334] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0ae1931-dd98-4c0b-a9ba-f58b5bf5217f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.009442] env[62109]: DEBUG oslo_vmware.api [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 942.009442] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52964ab9-85bd-94e7-a74f-59da83fd6121" [ 942.009442] env[62109]: _type = "Task" [ 942.009442] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.017136] env[62109]: DEBUG oslo_vmware.api [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52964ab9-85bd-94e7-a74f-59da83fd6121, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.061669] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116825, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458095} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.061937] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] e7e232c4-a2cb-44eb-8ee3-11fc12ee152a/e7e232c4-a2cb-44eb-8ee3-11fc12ee152a.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 942.062174] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 942.062426] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe007648-acd3-4327-b23c-ee2e045a5d3a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.068866] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 942.068866] env[62109]: value = "task-1116826" [ 942.068866] env[62109]: _type = "Task" [ 942.068866] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.076013] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116826, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.122917] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec43752b-875d-4b3b-bbb7-ece0fc93a2b6 req-3c83fb82-5b57-435b-bc49-68e946f7c7b8 service nova] Releasing lock "refresh_cache-5842e112-d3ef-4ce9-91cc-198e68d12422" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.358964] env[62109]: DEBUG nova.scheduler.client.report [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 942.520481] env[62109]: DEBUG oslo_vmware.api [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52964ab9-85bd-94e7-a74f-59da83fd6121, 'name': SearchDatastore_Task, 'duration_secs': 0.014122} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.520851] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.578145] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116826, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05983} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.578424] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 942.579225] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15b7039-c03a-4e9f-9f50-a01928b27c0d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.598189] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] e7e232c4-a2cb-44eb-8ee3-11fc12ee152a/e7e232c4-a2cb-44eb-8ee3-11fc12ee152a.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 942.598495] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0cb7d744-fe9d-42ed-9993-9320860f5bcb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.618366] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 942.618366] env[62109]: value = "task-1116827" [ 942.618366] env[62109]: _type = "Task" [ 942.618366] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.626336] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116827, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.866096] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.987s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.867367] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.919s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.867715] env[62109]: DEBUG nova.objects.instance [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Lazy-loading 'resources' on Instance uuid f91f4482-b18d-4883-9f6b-3bc5a386eedd {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.887029] env[62109]: INFO nova.scheduler.client.report [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Deleted allocations for instance 5c7dbe04-5027-49cd-a478-79046fee1f16 [ 942.895972] env[62109]: DEBUG nova.compute.manager [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 942.921712] env[62109]: DEBUG nova.virt.hardware [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 942.921941] env[62109]: DEBUG nova.virt.hardware [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 942.922123] env[62109]: DEBUG nova.virt.hardware [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.922317] env[62109]: DEBUG nova.virt.hardware [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 942.922787] env[62109]: DEBUG nova.virt.hardware [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.922787] env[62109]: DEBUG nova.virt.hardware [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 942.922942] env[62109]: DEBUG nova.virt.hardware [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 942.923050] env[62109]: DEBUG nova.virt.hardware [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 942.923251] 
env[62109]: DEBUG nova.virt.hardware [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 942.923439] env[62109]: DEBUG nova.virt.hardware [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 942.923637] env[62109]: DEBUG nova.virt.hardware [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 942.925029] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50dc33d9-1797-47cf-98ab-c44fe80d15de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.933182] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1401d46f-a73f-495f-b074-d54ab0fb56cd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.128477] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116827, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.232288] env[62109]: DEBUG nova.compute.manager [req-3358e8f7-3433-4123-a873-1ad102e1a9d0 req-e1e92cb4-eeef-4634-8e4e-9073e53a8f7c service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Received event network-vif-plugged-f9409e40-51f1-46fc-b21a-1c43e176574f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.232508] env[62109]: DEBUG oslo_concurrency.lockutils [req-3358e8f7-3433-4123-a873-1ad102e1a9d0 req-e1e92cb4-eeef-4634-8e4e-9073e53a8f7c service nova] Acquiring lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.232721] env[62109]: DEBUG oslo_concurrency.lockutils [req-3358e8f7-3433-4123-a873-1ad102e1a9d0 req-e1e92cb4-eeef-4634-8e4e-9073e53a8f7c service nova] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.232916] env[62109]: DEBUG oslo_concurrency.lockutils [req-3358e8f7-3433-4123-a873-1ad102e1a9d0 req-e1e92cb4-eeef-4634-8e4e-9073e53a8f7c service nova] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.233134] env[62109]: DEBUG nova.compute.manager [req-3358e8f7-3433-4123-a873-1ad102e1a9d0 req-e1e92cb4-eeef-4634-8e4e-9073e53a8f7c service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] No waiting events found dispatching network-vif-plugged-f9409e40-51f1-46fc-b21a-1c43e176574f {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 943.233317] env[62109]: WARNING nova.compute.manager [req-3358e8f7-3433-4123-a873-1ad102e1a9d0 req-e1e92cb4-eeef-4634-8e4e-9073e53a8f7c service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Received unexpected event network-vif-plugged-f9409e40-51f1-46fc-b21a-1c43e176574f for instance with vm_state building and task_state spawning. 
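The Acquiring/acquired/released triplets around the per-instance "-events" lock (and the longer-lived "compute_resources" lock earlier) come from oslo.concurrency's lockutils, which logs how long a caller waited for the lock and how long it was held. A minimal sketch of the two usual usage forms; the lock names and function bodies are illustrative placeholders.

    from oslo_concurrency import lockutils

    # Decorator form: serializes all callers on the named semaphore.
    @lockutils.synchronized("compute_resources")
    def update_usage(instance):
        pass  # critical section; "held N.NNNs" is logged when it exits

    # Context-manager form, as used for the short-lived per-instance event locks.
    def pop_event(instance_uuid, event_name):
        with lockutils.lock("%s-events" % instance_uuid):
            pass  # "waited N.NNNs" covers the time spent blocked here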
[ 943.319693] env[62109]: DEBUG nova.network.neutron [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Successfully updated port: f9409e40-51f1-46fc-b21a-1c43e176574f {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 943.395136] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c4535a50-82d0-4609-a71c-71f37784d49d tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "5c7dbe04-5027-49cd-a478-79046fee1f16" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.311s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.624848] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b98b8b2-83ba-4306-a176-9709623c9b74 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.634613] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17149e95-cfe0-43f8-9b06-3da526fb408e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.639771] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116827, 'name': ReconfigVM_Task, 'duration_secs': 0.970411} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.640155] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Reconfigured VM instance instance-00000052 to attach disk [datastore2] e7e232c4-a2cb-44eb-8ee3-11fc12ee152a/e7e232c4-a2cb-44eb-8ee3-11fc12ee152a.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 943.641038] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a2952120-4dff-41e3-a346-9f71087e7818 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.669589] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b348d8-7f1f-4723-a8a0-fc6c25de23cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.672175] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 943.672175] env[62109]: value = "task-1116828" [ 943.672175] env[62109]: _type = "Task" [ 943.672175] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.679723] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5a531f-4b66-48de-b2b6-1279efaf897a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.687376] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116828, 'name': Rename_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.697842] env[62109]: DEBUG nova.compute.provider_tree [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.822785] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "refresh_cache-414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.823029] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquired lock "refresh_cache-414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.823266] env[62109]: DEBUG nova.network.neutron [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 944.185193] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116828, 'name': Rename_Task, 'duration_secs': 0.440736} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.185659] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 944.186043] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f2851fd-5740-4ba1-b064-f7aa17369714 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.193684] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 944.193684] env[62109]: value = "task-1116829" [ 944.193684] env[62109]: _type = "Task" [ 944.193684] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.204351] env[62109]: DEBUG nova.scheduler.client.report [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 944.207936] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116829, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.363759] env[62109]: DEBUG nova.network.neutron [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 944.514592] env[62109]: DEBUG nova.network.neutron [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Updating instance_info_cache with network_info: [{"id": "f9409e40-51f1-46fc-b21a-1c43e176574f", "address": "fa:16:3e:9d:62:c6", "network": {"id": "97410b64-2638-4108-b740-0518f3eea13f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-366497996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57a1783401e34096b84023fc70da3840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9409e40-51", "ovs_interfaceid": "f9409e40-51f1-46fc-b21a-1c43e176574f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.705871] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116829, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.708753] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.841s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.711287] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.120s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.712878] env[62109]: INFO nova.compute.claims [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 944.732373] env[62109]: INFO nova.scheduler.client.report [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Deleted allocations for instance f91f4482-b18d-4883-9f6b-3bc5a386eedd [ 945.016961] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Releasing lock "refresh_cache-414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.017271] env[62109]: DEBUG nova.compute.manager [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Instance network_info: |[{"id": "f9409e40-51f1-46fc-b21a-1c43e176574f", "address": "fa:16:3e:9d:62:c6", "network": {"id": "97410b64-2638-4108-b740-0518f3eea13f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-366497996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57a1783401e34096b84023fc70da3840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9409e40-51", "ovs_interfaceid": "f9409e40-51f1-46fc-b21a-1c43e176574f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 945.018020] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None 
req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:62:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9409e40-51f1-46fc-b21a-1c43e176574f', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 945.025664] env[62109]: DEBUG oslo.service.loopingcall [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 945.025916] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 945.026541] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1070c9e-4877-4cbe-815f-34e83aeb4bce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.047315] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 945.047315] env[62109]: value = "task-1116830" [ 945.047315] env[62109]: _type = "Task" [ 945.047315] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.055356] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116830, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.205285] env[62109]: DEBUG oslo_vmware.api [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116829, 'name': PowerOnVM_Task, 'duration_secs': 0.57894} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.205613] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 945.205824] env[62109]: INFO nova.compute.manager [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Took 5.85 seconds to spawn the instance on the hypervisor. 
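Taken together, the task names above trace the vmwareapi spawn path for a cached image: copy the cached VMDK into the instance directory, extend it to the flavor's root size, reconfigure the VM to attach the disk, rename the VM, and power it on. A condensed sketch of that ordering; run_task and the argument names are placeholders for illustration, not the driver's real helpers.

    def spawn_from_cached_image(session, datastore, image_id, instance_uuid, root_kb):
        cache_dir = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
        src_vmdk = "%s/%s.vmdk" % (cache_dir, image_id)
        dst_vmdk = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

        run_task(session, "CopyVirtualDisk_Task", source=src_vmdk, dest=dst_vmdk)
        run_task(session, "ExtendVirtualDisk_Task", disk=dst_vmdk, new_size_kb=root_kb)
        run_task(session, "ReconfigVM_Task", vm=instance_uuid, attach=dst_vmdk)
        run_task(session, "Rename_Task", vm=instance_uuid)
        run_task(session, "PowerOnVM_Task", vm=instance_uuid)

    def run_task(session, task_name, **kwargs):
        # Placeholder: in the real driver this invokes the vSphere API and then
        # waits on the returned task (see the polling sketch earlier).
        pass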
[ 945.206015] env[62109]: DEBUG nova.compute.manager [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 945.207158] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c118c5c9-f564-4501-80aa-22e539524e52 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.239949] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ca6488c-fb47-4f5e-ba6c-31efd91dfd18 tempest-ServerTagsTestJSON-1404980418 tempest-ServerTagsTestJSON-1404980418-project-member] Lock "f91f4482-b18d-4883-9f6b-3bc5a386eedd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.353s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.347912] env[62109]: DEBUG nova.compute.manager [req-4094e359-094e-4452-bd8c-780b82d202d9 req-bce6610b-172e-4c98-913e-c86cdb007ce9 service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Received event network-changed-f9409e40-51f1-46fc-b21a-1c43e176574f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.348184] env[62109]: DEBUG nova.compute.manager [req-4094e359-094e-4452-bd8c-780b82d202d9 req-bce6610b-172e-4c98-913e-c86cdb007ce9 service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Refreshing instance network info cache due to event network-changed-f9409e40-51f1-46fc-b21a-1c43e176574f. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 945.348438] env[62109]: DEBUG oslo_concurrency.lockutils [req-4094e359-094e-4452-bd8c-780b82d202d9 req-bce6610b-172e-4c98-913e-c86cdb007ce9 service nova] Acquiring lock "refresh_cache-414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.348622] env[62109]: DEBUG oslo_concurrency.lockutils [req-4094e359-094e-4452-bd8c-780b82d202d9 req-bce6610b-172e-4c98-913e-c86cdb007ce9 service nova] Acquired lock "refresh_cache-414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.348820] env[62109]: DEBUG nova.network.neutron [req-4094e359-094e-4452-bd8c-780b82d202d9 req-bce6610b-172e-4c98-913e-c86cdb007ce9 service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Refreshing network info cache for port f9409e40-51f1-46fc-b21a-1c43e176574f {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 945.557844] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116830, 'name': CreateVM_Task, 'duration_secs': 0.312941} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.558090] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 945.558824] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.559024] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.559373] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 945.559603] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3d252a2-05a1-444b-aca3-e5b749295bfe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.564316] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 945.564316] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529e12a4-6442-5b2a-818d-2fd387afc382" [ 945.564316] env[62109]: _type = "Task" [ 945.564316] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.572138] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529e12a4-6442-5b2a-818d-2fd387afc382, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.576592] env[62109]: DEBUG oslo_concurrency.lockutils [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "7ace6356-1a81-4095-8286-c9b6d829062b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.576797] env[62109]: DEBUG oslo_concurrency.lockutils [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "7ace6356-1a81-4095-8286-c9b6d829062b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.577270] env[62109]: DEBUG oslo_concurrency.lockutils [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "7ace6356-1a81-4095-8286-c9b6d829062b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.577270] env[62109]: DEBUG oslo_concurrency.lockutils [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "7ace6356-1a81-4095-8286-c9b6d829062b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.577401] env[62109]: DEBUG oslo_concurrency.lockutils [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "7ace6356-1a81-4095-8286-c9b6d829062b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.579400] env[62109]: INFO nova.compute.manager [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Terminating instance [ 945.586053] env[62109]: DEBUG nova.compute.manager [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 945.586280] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 945.587070] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38552d73-45cf-473f-8bd2-086b48012991 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.593978] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 945.594229] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9fd6243-c019-45a6-99db-c78eaba16729 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.601109] env[62109]: DEBUG oslo_vmware.api [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 945.601109] env[62109]: value = "task-1116831" [ 945.601109] env[62109]: _type = "Task" [ 945.601109] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.612368] env[62109]: DEBUG oslo_vmware.api [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116831, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.733239] env[62109]: INFO nova.compute.manager [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Took 34.47 seconds to build instance. 
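The terminate path for instance 7ace6356 that starts above runs the spawn steps in reverse: power the VM off, then unregister it from vCenter (the VirtualMachine.UnregisterVM call appears a few entries later), before the driver cleans up the instance's datastore files. A hedged sketch of that ordering, again with placeholder helpers rather than the driver's actual code; the final file-cleanup step is assumed and not shown in this excerpt.

    def destroy_instance(session, vm_ref, datastore_path):
        run_task(session, "PowerOffVM_Task", vm=vm_ref)    # "Powering off the VM"
        invoke(session, "UnregisterVM", vm=vm_ref)         # "Unregistering the VM"
        run_task(session, "DeleteDatastoreFile_Task", path=datastore_path)

    def run_task(session, task_name, **kwargs):
        pass  # placeholder: invoke the vSphere call and wait on the returned task

    def invoke(session, method, **kwargs):
        pass  # placeholder: synchronous vSphere call with no task to poll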
[ 946.050660] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812b4a4e-a07d-401c-867e-97f7082610f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.060372] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c90a97-39ce-4cd3-82a7-c1e6cffb7dfa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.090941] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c758c9-b24f-4357-a675-1abd2da107d4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.099041] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529e12a4-6442-5b2a-818d-2fd387afc382, 'name': SearchDatastore_Task, 'duration_secs': 0.009438} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.101187] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.101465] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 946.101707] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.101857] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.102105] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 946.102454] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6812f772-bd2b-4079-bdf0-c64e043a64ac {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.105148] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1db599-73de-4d79-b0dc-3ec6c614829d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.109928] env[62109]: DEBUG nova.network.neutron [req-4094e359-094e-4452-bd8c-780b82d202d9 req-bce6610b-172e-4c98-913e-c86cdb007ce9 service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Updated VIF entry in instance network info cache for port f9409e40-51f1-46fc-b21a-1c43e176574f. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 946.110033] env[62109]: DEBUG nova.network.neutron [req-4094e359-094e-4452-bd8c-780b82d202d9 req-bce6610b-172e-4c98-913e-c86cdb007ce9 service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Updating instance_info_cache with network_info: [{"id": "f9409e40-51f1-46fc-b21a-1c43e176574f", "address": "fa:16:3e:9d:62:c6", "network": {"id": "97410b64-2638-4108-b740-0518f3eea13f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-366497996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57a1783401e34096b84023fc70da3840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9409e40-51", "ovs_interfaceid": "f9409e40-51f1-46fc-b21a-1c43e176574f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.121868] env[62109]: DEBUG oslo_vmware.api [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116831, 'name': PowerOffVM_Task, 'duration_secs': 0.251409} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.130137] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 946.130137] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 946.130534] env[62109]: DEBUG nova.compute.provider_tree [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.134099] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77aaf736-8133-414c-8305-96dbf54a4494 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.134806] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 946.134986] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 946.136147] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ef4041b-fa15-4e0b-8c72-4a8ea1165e4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.142268] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 946.142268] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ad6152-9f78-d717-96aa-501ffa48f0c9" [ 946.142268] env[62109]: _type = "Task" [ 946.142268] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.151161] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ad6152-9f78-d717-96aa-501ffa48f0c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.200605] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 946.200953] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 946.201245] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Deleting the datastore file [datastore2] 7ace6356-1a81-4095-8286-c9b6d829062b {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 946.201616] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce6a390f-06bb-41f4-acfd-426068913d85 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.210245] env[62109]: DEBUG oslo_vmware.api [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for the task: (returnval){ [ 946.210245] env[62109]: value = "task-1116833" [ 946.210245] env[62109]: _type = "Task" [ 946.210245] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.230977] env[62109]: DEBUG oslo_vmware.api [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116833, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.237053] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b74499c9-bcf2-46d0-87da-77f127feed17 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Lock "e7e232c4-a2cb-44eb-8ee3-11fc12ee152a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.976s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.615212] env[62109]: DEBUG oslo_concurrency.lockutils [req-4094e359-094e-4452-bd8c-780b82d202d9 req-bce6610b-172e-4c98-913e-c86cdb007ce9 service nova] Releasing lock "refresh_cache-414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.637683] env[62109]: DEBUG nova.scheduler.client.report [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 946.652749] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ad6152-9f78-d717-96aa-501ffa48f0c9, 'name': SearchDatastore_Task, 'duration_secs': 0.017154} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.653609] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6514ce2-ed57-4163-802e-9c7fc4329303 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.660653] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 946.660653] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5271423d-d862-80bf-c3f4-760e1ca7e80d" [ 946.660653] env[62109]: _type = "Task" [ 946.660653] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.670034] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5271423d-d862-80bf-c3f4-760e1ca7e80d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.719956] env[62109]: DEBUG oslo_vmware.api [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Task: {'id': task-1116833, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141744} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.720326] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.720422] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 946.720599] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 946.720779] env[62109]: INFO nova.compute.manager [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 946.721225] env[62109]: DEBUG oslo.service.loopingcall [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.721225] env[62109]: DEBUG nova.compute.manager [-] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 946.721497] env[62109]: DEBUG nova.network.neutron [-] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 946.725936] env[62109]: INFO nova.compute.manager [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Rebuilding instance [ 946.790857] env[62109]: DEBUG nova.compute.manager [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 946.791746] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16351ed3-1309-473b-b4b9-df485a2a1353 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.143253] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.143856] env[62109]: DEBUG nova.compute.manager [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 947.147170] env[62109]: DEBUG oslo_concurrency.lockutils [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 23.413s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.178643] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5271423d-d862-80bf-c3f4-760e1ca7e80d, 'name': SearchDatastore_Task, 'duration_secs': 0.009799} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.178643] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.178643] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5/414ac48f-68bc-4d37-98c0-4bcc9f7f37c5.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 947.178643] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae19fa74-7bdc-49ab-b166-1af43f549a76 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.188344] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 947.188344] env[62109]: value = "task-1116834" [ 947.188344] env[62109]: _type = "Task" [ 947.188344] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.196209] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116834, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.304018] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 947.304018] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ab25e72-8a23-4fb2-9a58-1c242fc6993d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.310841] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 947.310841] env[62109]: value = "task-1116835" [ 947.310841] env[62109]: _type = "Task" [ 947.310841] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.319592] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116835, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.384238] env[62109]: DEBUG nova.compute.manager [req-54a2c2c3-f390-4bf3-82a4-67f87e1f1da8 req-30fd4608-f932-4a90-8ec3-25505091f2ad service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Received event network-vif-deleted-a06370a9-effe-4205-85fa-bfa658250da0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 947.385707] env[62109]: INFO nova.compute.manager [req-54a2c2c3-f390-4bf3-82a4-67f87e1f1da8 req-30fd4608-f932-4a90-8ec3-25505091f2ad service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Neutron deleted interface a06370a9-effe-4205-85fa-bfa658250da0; detaching it from the instance and deleting it from the info cache [ 947.386033] env[62109]: DEBUG nova.network.neutron [req-54a2c2c3-f390-4bf3-82a4-67f87e1f1da8 req-30fd4608-f932-4a90-8ec3-25505091f2ad service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.609180] env[62109]: DEBUG nova.network.neutron [-] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.651210] env[62109]: DEBUG nova.compute.utils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 947.653391] env[62109]: DEBUG nova.compute.manager [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 947.653391] env[62109]: DEBUG nova.network.neutron [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 947.700952] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116834, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452677} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.701259] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5/414ac48f-68bc-4d37-98c0-4bcc9f7f37c5.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 947.701555] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 947.703635] env[62109]: DEBUG nova.policy [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491fd4e791924dafb155dd356bf20aa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6ee24c114bd495e8f29eeda1f6b8bba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 947.706279] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a35c98a-91b6-4b54-914e-afacdc3b0d7a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.718307] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 947.718307] env[62109]: value = "task-1116836" [ 947.718307] env[62109]: _type = "Task" [ 947.718307] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.727482] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116836, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.823780] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116835, 'name': PowerOffVM_Task, 'duration_secs': 0.153598} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.824122] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 947.824420] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 947.825223] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2721a8-1f3d-4e45-a33b-af394664faf4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.835387] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 947.835663] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7316db2f-4afc-4c03-bb4e-d53600210595 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.860895] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 947.860991] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 947.861172] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Deleting the datastore file [datastore2] e7e232c4-a2cb-44eb-8ee3-11fc12ee152a {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 947.861470] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e83fda6f-c121-44d8-bb7c-85f3c8fb17ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.868207] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 947.868207] env[62109]: value = "task-1116838" [ 947.868207] env[62109]: _type = "Task" [ 947.868207] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.876014] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116838, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.890413] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-949ed33d-4551-4055-bdc0-8a159cf53e6c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.910027] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1a02bd-78a5-4c7f-ae44-0ecc5508818b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.948808] env[62109]: DEBUG nova.compute.manager [req-54a2c2c3-f390-4bf3-82a4-67f87e1f1da8 req-30fd4608-f932-4a90-8ec3-25505091f2ad service nova] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Detach interface failed, port_id=a06370a9-effe-4205-85fa-bfa658250da0, reason: Instance 7ace6356-1a81-4095-8286-c9b6d829062b could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 947.986768] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c29bd5-0d68-480f-a0f8-e9652321fbc1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.994294] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1465ebbf-8b7b-42b5-8edc-24d26f282dd0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.028488] env[62109]: DEBUG nova.network.neutron [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Successfully created port: f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 948.031073] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe59f6c5-9e6d-4984-b6a5-820d43eb2c70 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.039513] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4705e9e9-124c-40b0-8b9a-4c87a0b3acc8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.053470] env[62109]: DEBUG nova.compute.provider_tree [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.112563] env[62109]: INFO nova.compute.manager [-] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Took 1.39 seconds to deallocate network for instance. 
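Taken together, the 7ace6356-1a81-4095-8286-c9b6d829062b entries above trace the teardown ordering recorded in this log: power off the VM, unregister it, delete its contents from datastore2, then deallocate the network (which later surfaces as the network-vif-deleted event). The sketch below is a hedged, self-contained illustration of that ordering only; every class and method name in it is a hypothetical stand-in, not nova.virt.vmwareapi or nova.network code.

```python
# Illustration of the teardown ordering visible in the log for instance
# 7ace6356-1a81-4095-8286-c9b6d829062b: power off -> unregister ->
# delete datastore contents -> deallocate network.  All names below are
# hypothetical stand-ins, not nova's real driver API.
import logging

logging.basicConfig(level=logging.INFO)
LOG = logging.getLogger("teardown-sketch")


class StubVM:
    def power_off(self):
        LOG.info("Powering off the VM")        # PowerOffVM_Task in the log

    def unregister(self):
        LOG.info("Unregistering the VM")       # VirtualMachine.UnregisterVM


class StubDatastore:
    def delete_directory(self, path):
        # Corresponds to FileManager.DeleteDatastoreFile_Task on
        # "[datastore2] <instance uuid>" in the log above.
        LOG.info("Deleting the datastore file [datastore2] %s", path)


class StubNetworkAPI:
    def deallocate_for_instance(self, instance_uuid):
        LOG.info("Deallocating network for instance %s", instance_uuid)


def destroy_instance(vm, datastore, network_api, instance_uuid):
    """Mirror the ordering of the destroy-path log records above."""
    vm.power_off()
    vm.unregister()
    try:
        datastore.delete_directory(instance_uuid)
    except FileNotFoundError:
        # Tolerate already-missing files so a half-built instance can
        # still be cleaned up.
        LOG.warning("datastore directory for %s already gone", instance_uuid)
    # Ports go last; the compute manager then consumes the resulting
    # network-vif-deleted events, as seen later in this log.
    network_api.deallocate_for_instance(instance_uuid)


if __name__ == "__main__":
    destroy_instance(StubVM(), StubDatastore(), StubNetworkAPI(),
                     "7ace6356-1a81-4095-8286-c9b6d829062b")
```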
[ 948.159771] env[62109]: DEBUG nova.compute.manager [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 948.229717] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116836, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070299} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.230107] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 948.231612] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121900a3-4549-44c4-a2d0-0c088905d47d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.255831] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5/414ac48f-68bc-4d37-98c0-4bcc9f7f37c5.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 948.256253] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64796829-1962-4050-8c9f-ffcf9579aed0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.279368] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 948.279368] env[62109]: value = "task-1116839" [ 948.279368] env[62109]: _type = "Task" [ 948.279368] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.287777] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116839, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.377854] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116838, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101499} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.377854] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 948.378286] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 948.378573] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 948.557173] env[62109]: DEBUG nova.scheduler.client.report [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 948.619872] env[62109]: DEBUG oslo_concurrency.lockutils [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.790635] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116839, 'name': ReconfigVM_Task, 'duration_secs': 0.311749} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.790913] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5/414ac48f-68bc-4d37-98c0-4bcc9f7f37c5.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.791567] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ecc17c0-54c7-429a-93b7-80b14636e972 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.799143] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 948.799143] env[62109]: value = "task-1116840" [ 948.799143] env[62109]: _type = "Task" [ 948.799143] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.806951] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116840, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.176882] env[62109]: DEBUG nova.compute.manager [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 949.207154] env[62109]: DEBUG nova.virt.hardware [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 949.207448] env[62109]: DEBUG nova.virt.hardware [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 949.207596] env[62109]: DEBUG nova.virt.hardware [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 949.207784] env[62109]: DEBUG nova.virt.hardware [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 949.207931] env[62109]: DEBUG nova.virt.hardware [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 949.208655] env[62109]: DEBUG nova.virt.hardware [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 949.208970] env[62109]: DEBUG nova.virt.hardware [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 949.209187] env[62109]: DEBUG nova.virt.hardware [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 949.209365] 
env[62109]: DEBUG nova.virt.hardware [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 949.209535] env[62109]: DEBUG nova.virt.hardware [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 949.209710] env[62109]: DEBUG nova.virt.hardware [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 949.210692] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0273581-8b7f-409f-9a39-2307b0c576e5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.219520] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053d99c3-6344-442e-be60-46aeb8c80d44 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.310357] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116840, 'name': Rename_Task, 'duration_secs': 0.14961} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.310674] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 949.310929] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91472623-0720-4fde-9837-e3671c33c096 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.318456] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 949.318456] env[62109]: value = "task-1116841" [ 949.318456] env[62109]: _type = "Task" [ 949.318456] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.328231] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116841, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.418937] env[62109]: DEBUG nova.virt.hardware [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 949.419269] env[62109]: DEBUG nova.virt.hardware [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 949.419437] env[62109]: DEBUG nova.virt.hardware [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 949.419787] env[62109]: DEBUG nova.virt.hardware [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 949.420028] env[62109]: DEBUG nova.virt.hardware [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 949.420196] env[62109]: DEBUG nova.virt.hardware [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 949.420416] env[62109]: DEBUG nova.virt.hardware [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 949.420582] env[62109]: DEBUG nova.virt.hardware [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 949.420754] env[62109]: DEBUG nova.virt.hardware [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 
tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 949.420930] env[62109]: DEBUG nova.virt.hardware [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 949.421166] env[62109]: DEBUG nova.virt.hardware [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 949.423555] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd71a96-542d-440e-bc3f-3afa63238950 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.430821] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afa4c60-dddf-4075-9c08-6b5013ef8910 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.447951] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.454969] env[62109]: DEBUG oslo.service.loopingcall [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.455589] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 949.455789] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-706381ac-1c11-41be-b38b-5bdcc1f7bfbc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.476248] env[62109]: DEBUG nova.compute.manager [req-d177175c-7ee1-4d44-a152-3d6f220a2b85 req-85e5671a-14cb-4200-8d35-ece350e0bd9a service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Received event network-vif-plugged-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 949.476389] env[62109]: DEBUG oslo_concurrency.lockutils [req-d177175c-7ee1-4d44-a152-3d6f220a2b85 req-85e5671a-14cb-4200-8d35-ece350e0bd9a service nova] Acquiring lock "6b5a009e-28f5-4be7-8641-089abe359954-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.476606] env[62109]: DEBUG oslo_concurrency.lockutils [req-d177175c-7ee1-4d44-a152-3d6f220a2b85 req-85e5671a-14cb-4200-8d35-ece350e0bd9a service nova] Lock "6b5a009e-28f5-4be7-8641-089abe359954-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.476797] env[62109]: DEBUG oslo_concurrency.lockutils [req-d177175c-7ee1-4d44-a152-3d6f220a2b85 req-85e5671a-14cb-4200-8d35-ece350e0bd9a service nova] Lock "6b5a009e-28f5-4be7-8641-089abe359954-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.476970] env[62109]: DEBUG nova.compute.manager [req-d177175c-7ee1-4d44-a152-3d6f220a2b85 req-85e5671a-14cb-4200-8d35-ece350e0bd9a service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] No waiting events found dispatching network-vif-plugged-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 949.477157] env[62109]: WARNING nova.compute.manager [req-d177175c-7ee1-4d44-a152-3d6f220a2b85 req-85e5671a-14cb-4200-8d35-ece350e0bd9a service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Received unexpected event network-vif-plugged-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d for instance with vm_state building and task_state spawning. [ 949.483633] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.483633] env[62109]: value = "task-1116842" [ 949.483633] env[62109]: _type = "Task" [ 949.483633] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.492662] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116842, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.568140] env[62109]: DEBUG nova.network.neutron [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Successfully updated port: f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 949.571203] env[62109]: DEBUG oslo_concurrency.lockutils [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.424s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.574448] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.011s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.577598] env[62109]: INFO nova.compute.claims [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 949.829747] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116841, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.994519] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116842, 'name': CreateVM_Task, 'duration_secs': 0.313009} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.994693] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 949.995131] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.995340] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.995643] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 949.996224] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74970f9b-030c-4d23-abd2-d1212bf9b521 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.000999] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 950.000999] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d2011c-7dc6-e966-1cf9-34d62a2ff9bb" [ 950.000999] env[62109]: _type = "Task" [ 950.000999] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.008382] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d2011c-7dc6-e966-1cf9-34d62a2ff9bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.075261] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.075535] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.076672] env[62109]: DEBUG nova.network.neutron [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 950.142402] env[62109]: INFO nova.scheduler.client.report [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted allocation for migration c4667422-9473-4783-af9c-f6de4a4209a6 [ 950.332805] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116841, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.514012] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d2011c-7dc6-e966-1cf9-34d62a2ff9bb, 'name': SearchDatastore_Task, 'duration_secs': 0.01536} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.514221] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.514278] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 950.514505] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.514659] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.514939] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 950.515160] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c687ccc7-7262-458c-9fb3-5d9afa23ecec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.523684] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 950.523852] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 950.524675] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90909ca6-d982-43fb-8811-b1714f6cf63e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.529698] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 950.529698] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a1e310-3e0e-a280-d351-65d472be9b87" [ 950.529698] env[62109]: _type = "Task" [ 950.529698] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.538513] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a1e310-3e0e-a280-d351-65d472be9b87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.612427] env[62109]: DEBUG nova.network.neutron [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 950.651228] env[62109]: DEBUG oslo_concurrency.lockutils [None req-31601a6d-621b-4b36-9127-380994705814 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "66bbe1e6-e5ee-46a0-b95c-449eef636509" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 30.606s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.795546] env[62109]: DEBUG nova.network.neutron [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updating instance_info_cache with network_info: [{"id": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "address": "fa:16:3e:1b:aa:bb", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fbaff0-8e", "ovs_interfaceid": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.833737] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116841, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.910568] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8ff9b0-a731-42ca-92c2-79c3717992f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.919236] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32b5a68-da7f-4005-a2be-74d6dc73b300 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.958820] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2783d027-cf77-403a-9ba3-a6cbdf3187d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.967450] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989842a2-58dd-431f-bb12-6d3eaae178e0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.984351] env[62109]: DEBUG nova.compute.provider_tree [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.042176] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a1e310-3e0e-a280-d351-65d472be9b87, 'name': SearchDatastore_Task, 'duration_secs': 0.010962} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.044815] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7809378a-4481-457a-b2e3-a4a0850bba91 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.049931] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 951.049931] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528c2b02-9f79-99aa-b189-330650122d6c" [ 951.049931] env[62109]: _type = "Task" [ 951.049931] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.057211] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528c2b02-9f79-99aa-b189-330650122d6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.212075] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.212315] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.300589] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.300920] env[62109]: DEBUG nova.compute.manager [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Instance network_info: |[{"id": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "address": "fa:16:3e:1b:aa:bb", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fbaff0-8e", "ovs_interfaceid": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 951.301381] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 
tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:aa:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b8137fc-f23d-49b1-b19c-3123a5588f34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8fbaff0-8e7e-4f1c-9709-51d00228bc0d', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 951.310210] env[62109]: DEBUG oslo.service.loopingcall [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 951.310438] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 951.310664] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c1e5997-c456-4e86-a72f-1fb3bd51e784 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.334915] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116841, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.335995] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 951.335995] env[62109]: value = "task-1116843" [ 951.335995] env[62109]: _type = "Task" [ 951.335995] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.343568] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116843, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.488413] env[62109]: DEBUG nova.scheduler.client.report [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 951.499910] env[62109]: DEBUG nova.compute.manager [req-e66c8647-c303-436e-b8e1-a7ce9372c562 req-191b0c86-9b71-4cae-8492-cfec164fb495 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Received event network-changed-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 951.500159] env[62109]: DEBUG nova.compute.manager [req-e66c8647-c303-436e-b8e1-a7ce9372c562 req-191b0c86-9b71-4cae-8492-cfec164fb495 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Refreshing instance network info cache due to event network-changed-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 951.500384] env[62109]: DEBUG oslo_concurrency.lockutils [req-e66c8647-c303-436e-b8e1-a7ce9372c562 req-191b0c86-9b71-4cae-8492-cfec164fb495 service nova] Acquiring lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.500522] env[62109]: DEBUG oslo_concurrency.lockutils [req-e66c8647-c303-436e-b8e1-a7ce9372c562 req-191b0c86-9b71-4cae-8492-cfec164fb495 service nova] Acquired lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.501094] env[62109]: DEBUG nova.network.neutron [req-e66c8647-c303-436e-b8e1-a7ce9372c562 req-191b0c86-9b71-4cae-8492-cfec164fb495 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Refreshing network info cache for port f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 951.560083] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528c2b02-9f79-99aa-b189-330650122d6c, 'name': SearchDatastore_Task, 'duration_secs': 0.00852} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.560302] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.560531] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] e7e232c4-a2cb-44eb-8ee3-11fc12ee152a/e7e232c4-a2cb-44eb-8ee3-11fc12ee152a.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 951.560781] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c099306a-eb93-46f3-bc87-29e682181d01 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.566782] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 951.566782] env[62109]: value = "task-1116844" [ 951.566782] env[62109]: _type = "Task" [ 951.566782] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.574812] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116844, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.715501] env[62109]: DEBUG nova.compute.manager [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 951.835369] env[62109]: DEBUG oslo_vmware.api [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116841, 'name': PowerOnVM_Task, 'duration_secs': 2.418814} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.835661] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 951.835871] env[62109]: INFO nova.compute.manager [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Took 8.94 seconds to spawn the instance on the hypervisor. [ 951.836094] env[62109]: DEBUG nova.compute.manager [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 951.836923] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2e85d4-5700-4603-9a82-52599902a6c9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.851895] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116843, 'name': CreateVM_Task, 'duration_secs': 0.331396} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.852124] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 951.852790] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.852957] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.853295] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 951.853545] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88497427-8c89-4105-9a61-5a09fea8ce9b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.857893] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 
tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 951.857893] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5281b7aa-b4a8-7252-0905-5eac67fd6c96" [ 951.857893] env[62109]: _type = "Task" [ 951.857893] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.865598] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5281b7aa-b4a8-7252-0905-5eac67fd6c96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.994042] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.420s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.994825] env[62109]: DEBUG nova.compute.manager [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 951.998561] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 25.453s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.078794] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116844, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.248620] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.255614] env[62109]: DEBUG nova.network.neutron [req-e66c8647-c303-436e-b8e1-a7ce9372c562 req-191b0c86-9b71-4cae-8492-cfec164fb495 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updated VIF entry in instance network info cache for port f8fbaff0-8e7e-4f1c-9709-51d00228bc0d. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 952.255614] env[62109]: DEBUG nova.network.neutron [req-e66c8647-c303-436e-b8e1-a7ce9372c562 req-191b0c86-9b71-4cae-8492-cfec164fb495 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updating instance_info_cache with network_info: [{"id": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "address": "fa:16:3e:1b:aa:bb", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fbaff0-8e", "ovs_interfaceid": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.363675] env[62109]: INFO nova.compute.manager [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Took 40.32 seconds to build instance. [ 952.373981] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5281b7aa-b4a8-7252-0905-5eac67fd6c96, 'name': SearchDatastore_Task, 'duration_secs': 0.015577} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.373981] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.373981] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 952.373981] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.373981] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.373981] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 952.373981] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7cc90b3f-db2b-4371-9d34-ce2593e7ada7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.383719] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 952.383913] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 952.384998] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-619dc532-5bfa-41b4-8269-555bda8dd19a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.390775] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 952.390775] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529a0264-bdcf-2fea-ef17-bb93758f454c" [ 952.390775] env[62109]: _type = "Task" [ 952.390775] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.398316] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529a0264-bdcf-2fea-ef17-bb93758f454c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.503101] env[62109]: DEBUG nova.compute.utils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 952.518965] env[62109]: DEBUG nova.compute.manager [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 952.519290] env[62109]: DEBUG nova.network.neutron [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 952.579517] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116844, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.676439} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.579868] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] e7e232c4-a2cb-44eb-8ee3-11fc12ee152a/e7e232c4-a2cb-44eb-8ee3-11fc12ee152a.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 952.580591] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 952.580591] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-419d3d1a-b76e-4bd2-98dc-f7919e977384 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.587812] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 952.587812] env[62109]: value = "task-1116845" [ 952.587812] env[62109]: _type = "Task" [ 952.587812] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.596138] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116845, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.601058] env[62109]: DEBUG nova.policy [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1da9d51bcc574df7aa69b59f2018d389', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7bff923ccb02449aa834523a0652cbdb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 952.664083] env[62109]: DEBUG oslo_concurrency.lockutils [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "66bbe1e6-e5ee-46a0-b95c-449eef636509" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.664512] env[62109]: DEBUG oslo_concurrency.lockutils [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "66bbe1e6-e5ee-46a0-b95c-449eef636509" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.664626] env[62109]: DEBUG oslo_concurrency.lockutils [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "66bbe1e6-e5ee-46a0-b95c-449eef636509-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.664813] env[62109]: DEBUG oslo_concurrency.lockutils [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "66bbe1e6-e5ee-46a0-b95c-449eef636509-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.664986] env[62109]: DEBUG oslo_concurrency.lockutils [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "66bbe1e6-e5ee-46a0-b95c-449eef636509-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.668690] env[62109]: INFO nova.compute.manager [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Terminating instance [ 952.671065] env[62109]: DEBUG nova.compute.manager [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 
tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 952.671164] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 952.671925] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab1aaa9-11af-4dd2-8648-2766197ad5fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.679713] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 952.680017] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed237f5f-5f91-4c1b-aab1-dccdd82bb8cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.688393] env[62109]: DEBUG oslo_vmware.api [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 952.688393] env[62109]: value = "task-1116846" [ 952.688393] env[62109]: _type = "Task" [ 952.688393] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.695253] env[62109]: DEBUG oslo_vmware.api [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116846, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.758543] env[62109]: DEBUG oslo_concurrency.lockutils [req-e66c8647-c303-436e-b8e1-a7ce9372c562 req-191b0c86-9b71-4cae-8492-cfec164fb495 service nova] Releasing lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.866428] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3eb8fc6-f328-40ea-bc30-46f26d6e0aff tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.827s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.900995] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529a0264-bdcf-2fea-ef17-bb93758f454c, 'name': SearchDatastore_Task, 'duration_secs': 0.013147} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.901836] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20ecaf1c-6221-4491-9a10-2637c237c50e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.907268] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 952.907268] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525882c9-9cac-02be-4065-b21b47dc5b39" [ 952.907268] env[62109]: _type = "Task" [ 952.907268] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.915246] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525882c9-9cac-02be-4065-b21b47dc5b39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.019231] env[62109]: DEBUG nova.compute.manager [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 953.023058] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Applying migration context for instance 8b63f9a1-5639-48b2-b0a9-30380835bef2 as it has an incoming, in-progress migration a1545213-e534-441e-8d4e-0d5c5aa97374. Migration status is confirming {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 953.025687] env[62109]: INFO nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating resource usage from migration a1545213-e534-441e-8d4e-0d5c5aa97374 [ 953.048091] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.048091] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.048091] env[62109]: WARNING nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 7ace6356-1a81-4095-8286-c9b6d829062b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 953.048091] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.048322] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.048322] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 0392a352-74e5-4551-9319-eebbc5e20d3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.048415] env[62109]: WARNING nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 5bea4229-6182-445e-b569-e7413ce92b93 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 953.048450] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 66bbe1e6-e5ee-46a0-b95c-449eef636509 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.048716] env[62109]: WARNING nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 128ae6c9-1f82-4c67-83be-42cb554c2fd3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 953.048802] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 39c17e34-c8c0-4a66-8d22-717efcb984bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.048867] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.049060] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Migration a1545213-e534-441e-8d4e-0d5c5aa97374 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 953.049139] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 8b63f9a1-5639-48b2-b0a9-30380835bef2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.049235] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 5842e112-d3ef-4ce9-91cc-198e68d12422 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.049345] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance e7e232c4-a2cb-44eb-8ee3-11fc12ee152a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.049455] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.050796] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 6b5a009e-28f5-4be7-8641-089abe359954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.051021] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance a197a73e-32bc-45b0-ae6f-5275cf74285b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.100880] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116845, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089067} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.101134] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 953.102129] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3759de4-5e77-4767-84c5-68efab471629 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.126405] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] e7e232c4-a2cb-44eb-8ee3-11fc12ee152a/e7e232c4-a2cb-44eb-8ee3-11fc12ee152a.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 953.126405] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baef4a10-57f0-442e-a76f-671bb63c265c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.145115] env[62109]: DEBUG nova.network.neutron [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Successfully created port: 98e332f4-3bb6-4be2-b072-a11329289f58 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 953.148913] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 953.148913] env[62109]: value = "task-1116847" [ 953.148913] env[62109]: _type = "Task" [ 953.148913] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.160387] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116847, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.200032] env[62109]: DEBUG oslo_vmware.api [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116846, 'name': PowerOffVM_Task, 'duration_secs': 0.195019} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.200032] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 953.200032] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 953.200032] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1f5fcdc-aa60-40f9-8685-5e4a8ada5b77 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.263866] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 953.263866] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 953.264172] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleting the datastore file [datastore1] 66bbe1e6-e5ee-46a0-b95c-449eef636509 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.264709] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79c42e6a-b1fa-4ea0-b1b6-bc7538c121ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.274016] env[62109]: DEBUG oslo_vmware.api [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 953.274016] env[62109]: value = "task-1116849" [ 953.274016] env[62109]: _type = "Task" [ 953.274016] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.283793] env[62109]: DEBUG oslo_vmware.api [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116849, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.295183] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.295582] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.418586] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525882c9-9cac-02be-4065-b21b47dc5b39, 'name': SearchDatastore_Task, 'duration_secs': 0.071482} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.418880] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.419101] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 6b5a009e-28f5-4be7-8641-089abe359954/6b5a009e-28f5-4be7-8641-089abe359954.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 953.419432] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fef33074-1562-4b26-ad4b-68fc7427032f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.426466] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 953.426466] env[62109]: value = "task-1116850" [ 953.426466] env[62109]: _type = "Task" [ 953.426466] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.436721] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116850, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.525558] env[62109]: DEBUG nova.compute.manager [req-19674d43-07c6-4539-a692-911f3fe75cea req-5a8f02cc-0a07-4b53-9f70-97d0e45f52fd service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Received event network-changed-f9409e40-51f1-46fc-b21a-1c43e176574f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 953.525886] env[62109]: DEBUG nova.compute.manager [req-19674d43-07c6-4539-a692-911f3fe75cea req-5a8f02cc-0a07-4b53-9f70-97d0e45f52fd service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Refreshing instance network info cache due to event network-changed-f9409e40-51f1-46fc-b21a-1c43e176574f. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 953.526170] env[62109]: DEBUG oslo_concurrency.lockutils [req-19674d43-07c6-4539-a692-911f3fe75cea req-5a8f02cc-0a07-4b53-9f70-97d0e45f52fd service nova] Acquiring lock "refresh_cache-414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.526329] env[62109]: DEBUG oslo_concurrency.lockutils [req-19674d43-07c6-4539-a692-911f3fe75cea req-5a8f02cc-0a07-4b53-9f70-97d0e45f52fd service nova] Acquired lock "refresh_cache-414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.526494] env[62109]: DEBUG nova.network.neutron [req-19674d43-07c6-4539-a692-911f3fe75cea req-5a8f02cc-0a07-4b53-9f70-97d0e45f52fd service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Refreshing network info cache for port f9409e40-51f1-46fc-b21a-1c43e176574f {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 953.554982] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 50c93e9e-5af6-489e-ac8a-29b8a6777a4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 953.659946] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116847, 'name': ReconfigVM_Task, 'duration_secs': 0.312323} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.660325] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Reconfigured VM instance instance-00000052 to attach disk [datastore2] e7e232c4-a2cb-44eb-8ee3-11fc12ee152a/e7e232c4-a2cb-44eb-8ee3-11fc12ee152a.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 953.660905] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45e9a7d8-2d93-4e9f-a914-afa6429d3e80 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.667506] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 953.667506] env[62109]: value = "task-1116851" [ 953.667506] env[62109]: _type = "Task" [ 953.667506] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.675569] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116851, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.782766] env[62109]: DEBUG oslo_vmware.api [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116849, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.4313} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.783105] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.783316] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 953.783534] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 953.783724] env[62109]: INFO nova.compute.manager [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 953.783978] env[62109]: DEBUG oslo.service.loopingcall [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 953.784455] env[62109]: DEBUG nova.compute.manager [-] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 953.784532] env[62109]: DEBUG nova.network.neutron [-] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 953.801157] env[62109]: DEBUG nova.compute.manager [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 953.939458] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116850, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.036467] env[62109]: DEBUG nova.compute.manager [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 954.057344] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 35a13db2-f645-4634-86e0-7e9a6a24fc66 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 954.070959] env[62109]: DEBUG nova.virt.hardware [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 954.070959] env[62109]: DEBUG nova.virt.hardware [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 954.071251] env[62109]: DEBUG nova.virt.hardware [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.071397] env[62109]: DEBUG nova.virt.hardware [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 954.071553] env[62109]: DEBUG nova.virt.hardware [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.071704] env[62109]: DEBUG nova.virt.hardware [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 954.071977] env[62109]: DEBUG nova.virt.hardware [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 954.072352] env[62109]: DEBUG nova.virt.hardware [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 954.072352] env[62109]: DEBUG nova.virt.hardware [None 
req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 954.072498] env[62109]: DEBUG nova.virt.hardware [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 954.072720] env[62109]: DEBUG nova.virt.hardware [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 954.073836] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04c5425-582b-4972-86db-cb294fb3f40e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.086530] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585296a3-4142-43df-9731-8101a33e6cb6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.183422] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116851, 'name': Rename_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.308854] env[62109]: DEBUG nova.network.neutron [req-19674d43-07c6-4539-a692-911f3fe75cea req-5a8f02cc-0a07-4b53-9f70-97d0e45f52fd service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Updated VIF entry in instance network info cache for port f9409e40-51f1-46fc-b21a-1c43e176574f. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 954.309304] env[62109]: DEBUG nova.network.neutron [req-19674d43-07c6-4539-a692-911f3fe75cea req-5a8f02cc-0a07-4b53-9f70-97d0e45f52fd service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Updating instance_info_cache with network_info: [{"id": "f9409e40-51f1-46fc-b21a-1c43e176574f", "address": "fa:16:3e:9d:62:c6", "network": {"id": "97410b64-2638-4108-b740-0518f3eea13f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-366497996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57a1783401e34096b84023fc70da3840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9409e40-51", "ovs_interfaceid": "f9409e40-51f1-46fc-b21a-1c43e176574f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.325350] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.440441] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116850, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.529762] env[62109]: DEBUG nova.network.neutron [-] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.560672] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 0f7445fa-c48e-4e79-a01a-1f8f70072de4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 954.674210] env[62109]: DEBUG nova.network.neutron [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Successfully updated port: 98e332f4-3bb6-4be2-b072-a11329289f58 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 954.681374] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116851, 'name': Rename_Task, 'duration_secs': 0.958347} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.681426] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 954.681696] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aae312e4-72c2-4ab6-90b5-9ee930651a3e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.688078] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 954.688078] env[62109]: value = "task-1116852" [ 954.688078] env[62109]: _type = "Task" [ 954.688078] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.696016] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116852, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.812316] env[62109]: DEBUG oslo_concurrency.lockutils [req-19674d43-07c6-4539-a692-911f3fe75cea req-5a8f02cc-0a07-4b53-9f70-97d0e45f52fd service nova] Releasing lock "refresh_cache-414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.938739] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116850, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.034284] env[62109]: INFO nova.compute.manager [-] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Took 1.25 seconds to deallocate network for instance. [ 955.063210] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance dfebeee8-06be-424b-89b0-7c1a3d4703eb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 955.063557] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 955.063645] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3520MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 955.176706] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "refresh_cache-a197a73e-32bc-45b0-ae6f-5275cf74285b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.176849] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquired lock "refresh_cache-a197a73e-32bc-45b0-ae6f-5275cf74285b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.176989] env[62109]: DEBUG nova.network.neutron [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 955.198302] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116852, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.336917] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9deb27a0-3d8a-47d3-b1fa-9232cf4cd219 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.347343] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82e4215-bb3d-4fef-93be-4de841deb7e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.377675] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83241eee-74c2-4116-ac61-9ed7af643500 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.385435] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478aa86d-53fc-4b59-84b7-29b3aaa25bb7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.398841] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 955.440872] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116850, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.920199} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.441304] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 6b5a009e-28f5-4be7-8641-089abe359954/6b5a009e-28f5-4be7-8641-089abe359954.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 955.441409] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 955.441643] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8eed1d76-6f74-4ee6-89e2-d56f604be17d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.447857] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 955.447857] env[62109]: value = "task-1116853" [ 955.447857] env[62109]: _type = "Task" [ 955.447857] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.456717] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116853, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.541859] env[62109]: DEBUG oslo_concurrency.lockutils [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.553025] env[62109]: DEBUG nova.compute.manager [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Received event network-vif-deleted-70216814-67e6-4c4a-80a6-94f8cf8dd246 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.553264] env[62109]: DEBUG nova.compute.manager [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Received event network-vif-plugged-98e332f4-3bb6-4be2-b072-a11329289f58 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.553449] env[62109]: DEBUG oslo_concurrency.lockutils [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] Acquiring lock "a197a73e-32bc-45b0-ae6f-5275cf74285b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.553655] env[62109]: DEBUG oslo_concurrency.lockutils [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.553826] env[62109]: DEBUG oslo_concurrency.lockutils [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.554133] env[62109]: DEBUG nova.compute.manager [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] No waiting events found dispatching network-vif-plugged-98e332f4-3bb6-4be2-b072-a11329289f58 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 955.555013] env[62109]: WARNING nova.compute.manager [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Received unexpected event network-vif-plugged-98e332f4-3bb6-4be2-b072-a11329289f58 for instance with vm_state building and task_state spawning. 
[ 955.555013] env[62109]: DEBUG nova.compute.manager [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Received event network-changed-98e332f4-3bb6-4be2-b072-a11329289f58 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.555013] env[62109]: DEBUG nova.compute.manager [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Refreshing instance network info cache due to event network-changed-98e332f4-3bb6-4be2-b072-a11329289f58. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 955.555013] env[62109]: DEBUG oslo_concurrency.lockutils [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] Acquiring lock "refresh_cache-a197a73e-32bc-45b0-ae6f-5275cf74285b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.699920] env[62109]: DEBUG oslo_vmware.api [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116852, 'name': PowerOnVM_Task, 'duration_secs': 0.832408} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.700265] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 955.700479] env[62109]: DEBUG nova.compute.manager [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 955.701256] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfef6aa6-f563-4418-bc17-1378820aae18 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.709983] env[62109]: DEBUG nova.network.neutron [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 955.841594] env[62109]: DEBUG nova.network.neutron [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Updating instance_info_cache with network_info: [{"id": "98e332f4-3bb6-4be2-b072-a11329289f58", "address": "fa:16:3e:92:f4:d4", "network": {"id": "66a020c3-cdbc-464e-83aa-02e9126e8492", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1240081161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bff923ccb02449aa834523a0652cbdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98e332f4-3b", "ovs_interfaceid": "98e332f4-3bb6-4be2-b072-a11329289f58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.919104] env[62109]: ERROR nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [req-500c454f-bbcf-486c-8a50-962027950533] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-500c454f-bbcf-486c-8a50-962027950533"}]} [ 955.936599] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 955.957114] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 955.957114] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 955.963998] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116853, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063496} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.964682] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 955.965330] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7212a2b1-0581-4a63-a36f-accb70824f78 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.969918] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 955.989222] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 6b5a009e-28f5-4be7-8641-089abe359954/6b5a009e-28f5-4be7-8641-089abe359954.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 955.989916] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32edf183-c8b1-4377-924d-e47db256b576 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.004934] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 956.012792] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 956.012792] env[62109]: value = "task-1116854" [ 956.012792] env[62109]: _type = "Task" [ 956.012792] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.020933] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116854, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.217884] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.284271] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec245fb-e377-4773-8871-650bb16fdea8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.291605] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78818be6-2b73-4725-97bd-157a5099b5a1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.320789] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ffdf862-0903-475d-86dc-08ac2c47911a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.328164] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ee4c62-33bc-4aa8-8a23-073bf6b6bdde {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.342296] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 956.343553] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Releasing lock "refresh_cache-a197a73e-32bc-45b0-ae6f-5275cf74285b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.343827] env[62109]: DEBUG nova.compute.manager [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Instance network_info: |[{"id": "98e332f4-3bb6-4be2-b072-a11329289f58", "address": "fa:16:3e:92:f4:d4", "network": {"id": "66a020c3-cdbc-464e-83aa-02e9126e8492", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1240081161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bff923ccb02449aa834523a0652cbdb", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98e332f4-3b", "ovs_interfaceid": "98e332f4-3bb6-4be2-b072-a11329289f58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 956.344315] env[62109]: DEBUG oslo_concurrency.lockutils [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] Acquired lock "refresh_cache-a197a73e-32bc-45b0-ae6f-5275cf74285b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.344504] env[62109]: DEBUG nova.network.neutron [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Refreshing network info cache for port 98e332f4-3bb6-4be2-b072-a11329289f58 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 956.346026] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:f4:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea6e81c3-94aa-40a6-a4d4-7f338b503442', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98e332f4-3bb6-4be2-b072-a11329289f58', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 956.353805] env[62109]: DEBUG oslo.service.loopingcall [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 956.356909] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 956.357402] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9c61976-113e-4e76-a128-17594e225fea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.379083] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 956.379083] env[62109]: value = "task-1116855" [ 956.379083] env[62109]: _type = "Task" [ 956.379083] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.387511] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116855, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.522800] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116854, 'name': ReconfigVM_Task, 'duration_secs': 0.278724} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.523196] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 6b5a009e-28f5-4be7-8641-089abe359954/6b5a009e-28f5-4be7-8641-089abe359954.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 956.523843] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0fd56bea-ac13-47d2-8be0-31b53a0faaaf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.530536] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 956.530536] env[62109]: value = "task-1116856" [ 956.530536] env[62109]: _type = "Task" [ 956.530536] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.543114] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116856, 'name': Rename_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.591771] env[62109]: DEBUG nova.network.neutron [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Updated VIF entry in instance network info cache for port 98e332f4-3bb6-4be2-b072-a11329289f58. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 956.592189] env[62109]: DEBUG nova.network.neutron [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Updating instance_info_cache with network_info: [{"id": "98e332f4-3bb6-4be2-b072-a11329289f58", "address": "fa:16:3e:92:f4:d4", "network": {"id": "66a020c3-cdbc-464e-83aa-02e9126e8492", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1240081161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bff923ccb02449aa834523a0652cbdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98e332f4-3b", "ovs_interfaceid": "98e332f4-3bb6-4be2-b072-a11329289f58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.876823] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 113 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 956.876823] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 113 to 114 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 956.877133] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 956.889716] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116855, 'name': CreateVM_Task, 'duration_secs': 0.332048} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.889876] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 956.890545] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.890709] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.891035] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 956.891823] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2536101d-35f2-493f-97a4-89a51418950a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.896919] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 956.896919] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523364d6-ac62-67d0-9ce4-4fdd4d195f50" [ 956.896919] env[62109]: _type = "Task" [ 956.896919] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.904323] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523364d6-ac62-67d0-9ce4-4fdd4d195f50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.040929] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquiring lock "e7e232c4-a2cb-44eb-8ee3-11fc12ee152a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.041205] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Lock "e7e232c4-a2cb-44eb-8ee3-11fc12ee152a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.041421] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquiring lock "e7e232c4-a2cb-44eb-8ee3-11fc12ee152a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.041646] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Lock "e7e232c4-a2cb-44eb-8ee3-11fc12ee152a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.041779] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Lock "e7e232c4-a2cb-44eb-8ee3-11fc12ee152a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.043398] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116856, 'name': Rename_Task, 'duration_secs': 0.147235} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.043844] env[62109]: INFO nova.compute.manager [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Terminating instance [ 957.045189] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 957.045636] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquiring lock "refresh_cache-e7e232c4-a2cb-44eb-8ee3-11fc12ee152a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.045796] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquired lock "refresh_cache-e7e232c4-a2cb-44eb-8ee3-11fc12ee152a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.045963] env[62109]: DEBUG nova.network.neutron [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 957.046828] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aff861d7-351c-49ea-bc28-c9bdafd1d3c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.054590] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 957.054590] env[62109]: value = "task-1116857" [ 957.054590] env[62109]: _type = "Task" [ 957.054590] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.063483] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116857, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.095341] env[62109]: DEBUG oslo_concurrency.lockutils [req-c1bf1c6d-1394-416c-84de-7e09dc3e6722 req-12e41b3b-3644-4ad0-8a99-dc5e524c785d service nova] Releasing lock "refresh_cache-a197a73e-32bc-45b0-ae6f-5275cf74285b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.384508] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 957.384694] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.386s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.384977] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.733s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.385181] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.387317] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.512s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.387503] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.389228] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.112s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.391094] env[62109]: INFO nova.compute.claims [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 957.408837] env[62109]: DEBUG oslo_vmware.api [None 
req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523364d6-ac62-67d0-9ce4-4fdd4d195f50, 'name': SearchDatastore_Task, 'duration_secs': 0.010475} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.409157] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.409398] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 957.409635] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.409788] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.409968] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 957.410252] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38acbd84-39ab-48f4-828b-04d3a22d4b03 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.417387] env[62109]: INFO nova.scheduler.client.report [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleted allocations for instance 128ae6c9-1f82-4c67-83be-42cb554c2fd3 [ 957.418966] env[62109]: INFO nova.scheduler.client.report [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Deleted allocations for instance 5bea4229-6182-445e-b569-e7413ce92b93 [ 957.421330] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 957.421330] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 957.425497] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0aaf850-82a1-4f28-8d61-92ac363ce1d1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.431782] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 957.431782] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5232b49f-aaf1-c5b4-6515-0696919bcf5b" [ 957.431782] env[62109]: _type = "Task" [ 957.431782] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.439137] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5232b49f-aaf1-c5b4-6515-0696919bcf5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.565349] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116857, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.567305] env[62109]: DEBUG nova.network.neutron [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 957.659553] env[62109]: DEBUG nova.network.neutron [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.932699] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e97262d7-9a64-457e-b042-6815acace7f6 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "128ae6c9-1f82-4c67-83be-42cb554c2fd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.918s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.933820] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a55d344b-6574-4f47-a193-8cb280d88714 tempest-ServerShowV247Test-1179013552 tempest-ServerShowV247Test-1179013552-project-member] Lock "5bea4229-6182-445e-b569-e7413ce92b93" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.094s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.944825] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5232b49f-aaf1-c5b4-6515-0696919bcf5b, 'name': SearchDatastore_Task, 'duration_secs': 0.008934} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.945617] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b5cf4da-6521-4855-8d81-15d0413be5c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.951833] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 957.951833] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fc861c-cc67-4213-17d6-ffbb87d41653" [ 957.951833] env[62109]: _type = "Task" [ 957.951833] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.960483] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fc861c-cc67-4213-17d6-ffbb87d41653, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.065570] env[62109]: DEBUG oslo_vmware.api [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116857, 'name': PowerOnVM_Task, 'duration_secs': 0.645673} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.065847] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 958.066086] env[62109]: INFO nova.compute.manager [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Took 8.89 seconds to spawn the instance on the hypervisor. [ 958.066297] env[62109]: DEBUG nova.compute.manager [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 958.067110] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a4cde6-6554-4e6d-b3b1-18b12fa78650 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.161874] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Releasing lock "refresh_cache-e7e232c4-a2cb-44eb-8ee3-11fc12ee152a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.162462] env[62109]: DEBUG nova.compute.manager [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 958.162567] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 958.163470] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387e5960-5266-45a1-9938-0a7c2d6dac4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.171778] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 958.172050] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73eb705d-9cae-413a-97f5-c9c8e4c69f39 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.179176] env[62109]: DEBUG oslo_vmware.api [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 958.179176] env[62109]: value = "task-1116858" [ 958.179176] env[62109]: _type = "Task" [ 958.179176] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.188097] env[62109]: DEBUG oslo_vmware.api [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116858, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.462273] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fc861c-cc67-4213-17d6-ffbb87d41653, 'name': SearchDatastore_Task, 'duration_secs': 0.009747} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.462548] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.462807] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] a197a73e-32bc-45b0-ae6f-5275cf74285b/a197a73e-32bc-45b0-ae6f-5275cf74285b.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 958.463080] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba78e58c-2a60-4570-a1b5-d40f2e1fad1a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.470841] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 958.470841] env[62109]: value = "task-1116859" [ 958.470841] env[62109]: _type = "Task" [ 958.470841] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.479052] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116859, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.585532] env[62109]: INFO nova.compute.manager [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Took 42.01 seconds to build instance. [ 958.688023] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509d0f50-3612-40b4-9ea6-259c78b86f75 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.702162] env[62109]: DEBUG oslo_vmware.api [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116858, 'name': PowerOffVM_Task, 'duration_secs': 0.117509} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.703381] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 958.703584] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 958.704140] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5edd010-b589-42f4-8ea8-958b51cdae48 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.709740] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c91838-3f72-44ec-ad4f-c7a7efdeda0d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.747279] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7da914-f389-40c4-94c1-f1d90b2b6d43 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.750427] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 958.750644] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 958.750827] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Deleting the datastore file [datastore2] e7e232c4-a2cb-44eb-8ee3-11fc12ee152a {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 958.751116] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47c327c5-4086-4527-ab7a-0f2fd316bb9b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.758478] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37873f1e-d2d2-4770-b31e-911bccb7d83b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.764697] env[62109]: DEBUG oslo_vmware.api [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for the task: (returnval){ [ 958.764697] env[62109]: value = "task-1116861" [ 958.764697] env[62109]: _type = "Task" 
[ 958.764697] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.777424] env[62109]: DEBUG nova.compute.provider_tree [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.785941] env[62109]: DEBUG oslo_vmware.api [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116861, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.980696] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116859, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480086} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.981167] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] a197a73e-32bc-45b0-ae6f-5275cf74285b/a197a73e-32bc-45b0-ae6f-5275cf74285b.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 958.981443] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 958.981704] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e995fe8-d1e5-4566-8547-f52158a69f7a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.990558] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 958.990558] env[62109]: value = "task-1116862" [ 958.990558] env[62109]: _type = "Task" [ 958.990558] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.998633] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116862, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.088089] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2e84ae10-45bd-44fc-b89e-e430afd8ad18 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "6b5a009e-28f5-4be7-8641-089abe359954" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.520s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.172975] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "0392a352-74e5-4551-9319-eebbc5e20d3b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.173247] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "0392a352-74e5-4551-9319-eebbc5e20d3b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.173462] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "0392a352-74e5-4551-9319-eebbc5e20d3b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.173650] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "0392a352-74e5-4551-9319-eebbc5e20d3b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.173888] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "0392a352-74e5-4551-9319-eebbc5e20d3b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.178579] env[62109]: INFO nova.compute.manager [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Terminating instance [ 959.180762] env[62109]: DEBUG nova.compute.manager [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 959.180956] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 959.181834] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f5e0a9-c557-41b8-bbaf-e41126a8f56a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.190051] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 959.190175] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b44ca5c4-a204-4f1d-88df-ef5384fab461 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.198046] env[62109]: DEBUG oslo_vmware.api [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 959.198046] env[62109]: value = "task-1116863" [ 959.198046] env[62109]: _type = "Task" [ 959.198046] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.206301] env[62109]: DEBUG oslo_vmware.api [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116863, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.274055] env[62109]: DEBUG oslo_vmware.api [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Task: {'id': task-1116861, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201884} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.274392] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 959.274517] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 959.274754] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 959.274972] env[62109]: INFO nova.compute.manager [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 959.275264] env[62109]: DEBUG oslo.service.loopingcall [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 959.275470] env[62109]: DEBUG nova.compute.manager [-] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 959.275565] env[62109]: DEBUG nova.network.neutron [-] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 959.283020] env[62109]: DEBUG nova.scheduler.client.report [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 959.293373] env[62109]: DEBUG nova.network.neutron [-] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 959.315146] env[62109]: DEBUG nova.compute.manager [req-2b8a9ef7-1860-4cc8-958a-6ec0db7994f9 req-c3029a6e-88b9-4deb-bb4f-d5cb77cb1f0a service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Received event network-changed-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 959.317659] env[62109]: DEBUG nova.compute.manager [req-2b8a9ef7-1860-4cc8-958a-6ec0db7994f9 req-c3029a6e-88b9-4deb-bb4f-d5cb77cb1f0a service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Refreshing instance network info cache due to event network-changed-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 959.317659] env[62109]: DEBUG oslo_concurrency.lockutils [req-2b8a9ef7-1860-4cc8-958a-6ec0db7994f9 req-c3029a6e-88b9-4deb-bb4f-d5cb77cb1f0a service nova] Acquiring lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.317659] env[62109]: DEBUG oslo_concurrency.lockutils [req-2b8a9ef7-1860-4cc8-958a-6ec0db7994f9 req-c3029a6e-88b9-4deb-bb4f-d5cb77cb1f0a service nova] Acquired lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.317659] env[62109]: DEBUG nova.network.neutron [req-2b8a9ef7-1860-4cc8-958a-6ec0db7994f9 req-c3029a6e-88b9-4deb-bb4f-d5cb77cb1f0a service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Refreshing network info cache for port f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 959.502421] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116862, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124851} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.502801] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.503877] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6969168e-f3a2-4579-b383-fcd6e7b11bd4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.529956] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] a197a73e-32bc-45b0-ae6f-5275cf74285b/a197a73e-32bc-45b0-ae6f-5275cf74285b.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.531032] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f8a8534-9f11-46ea-bebd-016b447ae9e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.559927] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 959.559927] env[62109]: value = "task-1116864" [ 959.559927] env[62109]: _type = "Task" [ 959.559927] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.571013] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116864, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.657208] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.657208] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.709253] env[62109]: DEBUG oslo_vmware.api [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116863, 'name': PowerOffVM_Task, 'duration_secs': 0.346936} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.709253] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 959.709609] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 959.709973] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63a9de01-b274-4b28-8508-3e04c3fa75d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.779416] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 959.779721] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 959.779940] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleting the datastore file [datastore2] 0392a352-74e5-4551-9319-eebbc5e20d3b {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 959.780321] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77dc11d6-18c9-413f-8663-bcb4b35628fb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.785274] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.396s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.785821] env[62109]: DEBUG nova.compute.manager [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 959.791507] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.439s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.792400] env[62109]: INFO nova.compute.claims [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 959.795746] env[62109]: DEBUG oslo_vmware.api [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 959.795746] env[62109]: value = "task-1116866" [ 959.795746] env[62109]: _type = "Task" [ 959.795746] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.796183] env[62109]: DEBUG nova.network.neutron [-] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.808407] env[62109]: DEBUG oslo_vmware.api [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116866, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.924324] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "dce54763-ad3a-40d3-8f72-f0a1aefaf086" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.924565] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "dce54763-ad3a-40d3-8f72-f0a1aefaf086" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.054801] env[62109]: DEBUG nova.network.neutron [req-2b8a9ef7-1860-4cc8-958a-6ec0db7994f9 req-c3029a6e-88b9-4deb-bb4f-d5cb77cb1f0a service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updated VIF entry in instance network info cache for port f8fbaff0-8e7e-4f1c-9709-51d00228bc0d. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 960.055198] env[62109]: DEBUG nova.network.neutron [req-2b8a9ef7-1860-4cc8-958a-6ec0db7994f9 req-c3029a6e-88b9-4deb-bb4f-d5cb77cb1f0a service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updating instance_info_cache with network_info: [{"id": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "address": "fa:16:3e:1b:aa:bb", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fbaff0-8e", "ovs_interfaceid": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.069790] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116864, 'name': ReconfigVM_Task, 'duration_secs': 0.316179} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.070507] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Reconfigured VM instance instance-00000055 to attach disk [datastore1] a197a73e-32bc-45b0-ae6f-5275cf74285b/a197a73e-32bc-45b0-ae6f-5275cf74285b.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.070813] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d680fd0-97fd-48eb-abff-84d329d31ab8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.077569] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 960.077569] env[62109]: value = "task-1116867" [ 960.077569] env[62109]: _type = "Task" [ 960.077569] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.085638] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116867, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.134432] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "b5410f60-c5fb-4325-8d42-8745c310a6ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.134690] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "b5410f60-c5fb-4325-8d42-8745c310a6ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.160358] env[62109]: DEBUG nova.compute.utils [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 960.298649] env[62109]: DEBUG nova.compute.utils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 960.301108] env[62109]: INFO nova.compute.manager [-] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Took 1.03 seconds to deallocate network for instance. [ 960.301397] env[62109]: DEBUG nova.compute.manager [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 960.301586] env[62109]: DEBUG nova.network.neutron [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 960.318488] env[62109]: DEBUG oslo_vmware.api [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116866, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194103} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.318759] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 960.318949] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 960.319199] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 960.319390] env[62109]: INFO nova.compute.manager [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 960.319679] env[62109]: DEBUG oslo.service.loopingcall [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 960.319878] env[62109]: DEBUG nova.compute.manager [-] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 960.319990] env[62109]: DEBUG nova.network.neutron [-] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 960.381661] env[62109]: DEBUG nova.policy [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dfa8e274a59f4086bfb08cf73dac8315', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd47712667550407d8846659ec113017b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 960.427340] env[62109]: DEBUG nova.compute.manager [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 960.558429] env[62109]: DEBUG oslo_concurrency.lockutils [req-2b8a9ef7-1860-4cc8-958a-6ec0db7994f9 req-c3029a6e-88b9-4deb-bb4f-d5cb77cb1f0a service nova] Releasing lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.589472] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116867, 'name': Rename_Task, 'duration_secs': 0.142947} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.589800] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 960.590142] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c5e4318-84bb-456d-8afc-3fede79f284d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.596870] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 960.596870] env[62109]: value = "task-1116868" [ 960.596870] env[62109]: _type = "Task" [ 960.596870] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.607300] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116868, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.637272] env[62109]: DEBUG nova.compute.manager [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 960.663696] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.802196] env[62109]: DEBUG nova.compute.manager [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 960.810704] env[62109]: DEBUG nova.network.neutron [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Successfully created port: a9a12ab6-6933-4a0d-969e-48319fbc9121 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 960.812899] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.913077] env[62109]: DEBUG nova.compute.manager [req-da7eac2b-10dd-4722-9b27-35a689512555 req-61b119dd-918c-498f-97a4-cfffe2ca89bf service nova] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Received event network-vif-deleted-0b15c050-09d2-478d-b46e-797a5ff6bd05 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 960.913391] env[62109]: INFO nova.compute.manager [req-da7eac2b-10dd-4722-9b27-35a689512555 req-61b119dd-918c-498f-97a4-cfffe2ca89bf service nova] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Neutron deleted interface 0b15c050-09d2-478d-b46e-797a5ff6bd05; detaching it from the instance and deleting it from the info cache [ 960.913577] env[62109]: DEBUG nova.network.neutron [req-da7eac2b-10dd-4722-9b27-35a689512555 req-61b119dd-918c-498f-97a4-cfffe2ca89bf service nova] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.952595] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.106829] env[62109]: DEBUG oslo_vmware.api [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1116868, 'name': PowerOnVM_Task, 'duration_secs': 0.456024} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.108023] env[62109]: DEBUG nova.network.neutron [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Successfully created port: e7859833-4b92-4db6-9be7-2aa396869294 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 961.108980] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 961.109244] env[62109]: INFO nova.compute.manager [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Took 7.07 seconds to spawn the instance on the hypervisor. [ 961.109437] env[62109]: DEBUG nova.compute.manager [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 961.110297] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e604708-9bc1-4862-9c7a-be0eb2865258 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.128812] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af61da96-a86e-4bf7-8bcd-fc312a831be8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.137302] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65209d1e-ce11-4e26-a1fd-f3459b062c26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.172389] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e2fd96-a8dd-4d7c-851b-0d7cf29b3457 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.181584] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eaef0c1-7c5e-4247-90e9-a85bf00e1df5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.188202] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.200846] env[62109]: DEBUG nova.compute.provider_tree [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 
{{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.218783] env[62109]: DEBUG nova.network.neutron [-] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.417205] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf97de22-1ae6-4e14-a94c-c6b84f877ea7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.425635] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79eea9cc-7887-4452-8c3b-3e60db5e4af8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.455468] env[62109]: DEBUG nova.compute.manager [req-da7eac2b-10dd-4722-9b27-35a689512555 req-61b119dd-918c-498f-97a4-cfffe2ca89bf service nova] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Detach interface failed, port_id=0b15c050-09d2-478d-b46e-797a5ff6bd05, reason: Instance 0392a352-74e5-4551-9319-eebbc5e20d3b could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 961.634047] env[62109]: INFO nova.compute.manager [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Took 37.09 seconds to build instance. [ 961.706486] env[62109]: DEBUG nova.scheduler.client.report [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 961.721570] env[62109]: INFO nova.compute.manager [-] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Took 1.40 seconds to deallocate network for instance. 
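Editor's note: the ReconfigVM_Task, Rename_Task, DeleteDatastoreFile_Task and PowerOnVM_Task entries above all follow oslo.vmware's invoke-and-poll pattern: the driver invokes a vSphere task method through the API session, then blocks in wait_for_task(), whose polling loop produces the recurring "Task: {...} progress is N%" lines. A minimal sketch of that pattern is below; it is not the driver's code, and the host, credentials and UUID are placeholders.

    # Minimal sketch (assumptions: oslo.vmware installed, reachable vCenter,
    # placeholder credentials/UUID) of the invoke-and-poll pattern behind the
    # "Invoking VirtualMachine.PowerOnVM_Task ..." / "progress is N%" entries.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'password',   # placeholder vCenter + creds
        api_retry_count=10,
        task_poll_interval=0.5)                  # drives the periodic _poll_task lines

    # Look up the VM by instance UUID via the vSphere SearchIndex
    # (placeholder UUID; a real caller would handle an empty result).
    search_index = session.vim.service_content.searchIndex
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid', search_index,
        uuid='00000000-0000-0000-0000-000000000000',
        vmSearch=True, instanceUuid=True)

    # Start the power-on task and block until vCenter reports completion;
    # each poll cycle corresponds to one "Task: {...} progress is N%" record.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_refs[0])
    session.wait_for_task(task_ref)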
[ 961.753446] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.753751] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.754040] env[62109]: INFO nova.compute.manager [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Attaching volume c2c1fe43-1dae-4c41-a564-3d09f609743e to /dev/sdb [ 961.791905] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1531fc4-fa8b-4d42-83c9-2083772640cd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.798804] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5790129-585c-4adc-ab13-ac2ee490e00d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.812630] env[62109]: DEBUG nova.virt.block_device [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updating existing volume attachment record: fd859860-3d3c-4039-a1c2-dcafedae487b {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 961.817765] env[62109]: DEBUG nova.compute.manager [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 961.844373] env[62109]: DEBUG nova.virt.hardware [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 961.845105] env[62109]: DEBUG nova.virt.hardware [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 961.845105] env[62109]: DEBUG nova.virt.hardware [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 961.845236] env[62109]: DEBUG nova.virt.hardware [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 961.845948] env[62109]: DEBUG nova.virt.hardware [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 961.846120] env[62109]: DEBUG nova.virt.hardware [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 961.846351] env[62109]: DEBUG nova.virt.hardware [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 961.846534] env[62109]: DEBUG nova.virt.hardware [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 961.846726] env[62109]: DEBUG nova.virt.hardware [None req-64452616-d475-4ac1-8e90-681163a08e55 
tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 961.846974] env[62109]: DEBUG nova.virt.hardware [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 961.847110] env[62109]: DEBUG nova.virt.hardware [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 961.848165] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a913eba-5dc9-4a7d-9a8e-7d51899cf8b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.858166] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc26175-6e29-4aa9-9cc9-35e900f78eb7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.136882] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9865a38b-fe47-406e-bdcf-874ec78ad1c4 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.604s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.213255] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.213255] env[62109]: DEBUG nova.compute.manager [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 962.217038] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 19.695s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.227468] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.479460] env[62109]: DEBUG nova.compute.manager [req-69ee1737-6a7d-4031-bc92-229a37fb0f9d req-68cb290f-0841-416d-81b2-ee1e4ee39a20 service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Received event network-changed-98e332f4-3bb6-4be2-b072-a11329289f58 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.479682] env[62109]: DEBUG nova.compute.manager [req-69ee1737-6a7d-4031-bc92-229a37fb0f9d req-68cb290f-0841-416d-81b2-ee1e4ee39a20 service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Refreshing instance network info cache due to event network-changed-98e332f4-3bb6-4be2-b072-a11329289f58. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 962.479929] env[62109]: DEBUG oslo_concurrency.lockutils [req-69ee1737-6a7d-4031-bc92-229a37fb0f9d req-68cb290f-0841-416d-81b2-ee1e4ee39a20 service nova] Acquiring lock "refresh_cache-a197a73e-32bc-45b0-ae6f-5275cf74285b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.480313] env[62109]: DEBUG oslo_concurrency.lockutils [req-69ee1737-6a7d-4031-bc92-229a37fb0f9d req-68cb290f-0841-416d-81b2-ee1e4ee39a20 service nova] Acquired lock "refresh_cache-a197a73e-32bc-45b0-ae6f-5275cf74285b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.480561] env[62109]: DEBUG nova.network.neutron [req-69ee1737-6a7d-4031-bc92-229a37fb0f9d req-68cb290f-0841-416d-81b2-ee1e4ee39a20 service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Refreshing network info cache for port 98e332f4-3bb6-4be2-b072-a11329289f58 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 962.721486] env[62109]: DEBUG nova.compute.utils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 962.727302] env[62109]: DEBUG nova.compute.manager [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 962.727537] env[62109]: DEBUG nova.network.neutron [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 962.784714] env[62109]: DEBUG nova.policy [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d8b4a13b12d477ebd973d90ec11f62d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f94fd7a82dc0489597534c518365971b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 962.863140] env[62109]: DEBUG nova.network.neutron [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Successfully updated port: a9a12ab6-6933-4a0d-969e-48319fbc9121 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 962.945912] env[62109]: DEBUG nova.compute.manager [req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Received event network-vif-plugged-a9a12ab6-6933-4a0d-969e-48319fbc9121 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.946150] env[62109]: DEBUG oslo_concurrency.lockutils [req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] Acquiring lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.946466] env[62109]: DEBUG oslo_concurrency.lockutils [req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] Lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.946528] env[62109]: DEBUG oslo_concurrency.lockutils [req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] Lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.946713] env[62109]: DEBUG nova.compute.manager [req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] No waiting events found dispatching network-vif-plugged-a9a12ab6-6933-4a0d-969e-48319fbc9121 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 962.946868] env[62109]: WARNING nova.compute.manager 
[req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Received unexpected event network-vif-plugged-a9a12ab6-6933-4a0d-969e-48319fbc9121 for instance with vm_state building and task_state spawning. [ 962.946992] env[62109]: DEBUG nova.compute.manager [req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Received event network-changed-a9a12ab6-6933-4a0d-969e-48319fbc9121 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.947207] env[62109]: DEBUG nova.compute.manager [req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Refreshing instance network info cache due to event network-changed-a9a12ab6-6933-4a0d-969e-48319fbc9121. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 962.947387] env[62109]: DEBUG oslo_concurrency.lockutils [req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] Acquiring lock "refresh_cache-50c93e9e-5af6-489e-ac8a-29b8a6777a4e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.947594] env[62109]: DEBUG oslo_concurrency.lockutils [req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] Acquired lock "refresh_cache-50c93e9e-5af6-489e-ac8a-29b8a6777a4e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.947845] env[62109]: DEBUG nova.network.neutron [req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Refreshing network info cache for port a9a12ab6-6933-4a0d-969e-48319fbc9121 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 963.058022] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc82fa8-2010-4264-b80c-44d1319c6d62 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.071982] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb5eaab-a732-4f3b-824c-cae6b93cfca5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.108642] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55382687-1532-40ea-933b-cbdeab76bd84 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.117415] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a3a219-122f-4f23-8df4-f4bdecbf9b8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.132090] env[62109]: DEBUG nova.compute.provider_tree [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.190842] env[62109]: DEBUG 
nova.network.neutron [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Successfully created port: 5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 963.228024] env[62109]: DEBUG nova.compute.manager [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 963.381946] env[62109]: DEBUG nova.network.neutron [req-69ee1737-6a7d-4031-bc92-229a37fb0f9d req-68cb290f-0841-416d-81b2-ee1e4ee39a20 service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Updated VIF entry in instance network info cache for port 98e332f4-3bb6-4be2-b072-a11329289f58. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 963.382340] env[62109]: DEBUG nova.network.neutron [req-69ee1737-6a7d-4031-bc92-229a37fb0f9d req-68cb290f-0841-416d-81b2-ee1e4ee39a20 service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Updating instance_info_cache with network_info: [{"id": "98e332f4-3bb6-4be2-b072-a11329289f58", "address": "fa:16:3e:92:f4:d4", "network": {"id": "66a020c3-cdbc-464e-83aa-02e9126e8492", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1240081161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bff923ccb02449aa834523a0652cbdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98e332f4-3b", "ovs_interfaceid": "98e332f4-3bb6-4be2-b072-a11329289f58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.499528] env[62109]: DEBUG nova.network.neutron [req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 963.606090] env[62109]: DEBUG nova.network.neutron [req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.634809] env[62109]: DEBUG nova.scheduler.client.report [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 963.885510] env[62109]: DEBUG oslo_concurrency.lockutils [req-69ee1737-6a7d-4031-bc92-229a37fb0f9d req-68cb290f-0841-416d-81b2-ee1e4ee39a20 service nova] Releasing lock "refresh_cache-a197a73e-32bc-45b0-ae6f-5275cf74285b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.112707] env[62109]: DEBUG oslo_concurrency.lockutils [req-d168dff2-fdaf-4b06-9613-2fdc69839856 req-352dc2ba-68e1-4106-b56e-9fd5a20f8e21 service nova] Releasing lock "refresh_cache-50c93e9e-5af6-489e-ac8a-29b8a6777a4e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.236220] env[62109]: DEBUG nova.compute.manager [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 964.263311] env[62109]: DEBUG nova.virt.hardware [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 964.263578] env[62109]: DEBUG nova.virt.hardware [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 964.263756] env[62109]: DEBUG nova.virt.hardware [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 964.263959] env[62109]: DEBUG nova.virt.hardware [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 964.264420] env[62109]: DEBUG nova.virt.hardware [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 964.264420] env[62109]: DEBUG nova.virt.hardware [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 964.264513] env[62109]: DEBUG nova.virt.hardware [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 964.264691] env[62109]: DEBUG nova.virt.hardware [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 964.264835] env[62109]: DEBUG nova.virt.hardware [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa 
tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 964.264996] env[62109]: DEBUG nova.virt.hardware [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 964.265129] env[62109]: DEBUG nova.virt.hardware [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 964.266034] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfe531d-1ade-40c2-93e3-1b5e6b8935ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.274240] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0a6d8a-6b7e-4672-8cbf-5b3e8c188580 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.647763] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.432s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.647996] env[62109]: DEBUG nova.compute.manager [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=62109) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4909}} [ 964.650985] env[62109]: DEBUG oslo_concurrency.lockutils [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.031s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.651206] env[62109]: DEBUG oslo_concurrency.lockutils [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.653459] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.405s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.654768] env[62109]: INFO nova.compute.claims [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 964.683092] env[62109]: INFO nova.scheduler.client.report [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Deleted allocations for instance 7ace6356-1a81-4095-8286-c9b6d829062b [ 964.818099] env[62109]: DEBUG nova.network.neutron [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Successfully updated port: 5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 964.939836] env[62109]: DEBUG nova.compute.manager [req-c5bc5506-731a-47a5-b220-5f992a794333 req-25b1029a-28b8-4e6f-8c27-f80759be0f97 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Received event network-vif-plugged-e7859833-4b92-4db6-9be7-2aa396869294 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 964.940113] env[62109]: DEBUG oslo_concurrency.lockutils [req-c5bc5506-731a-47a5-b220-5f992a794333 req-25b1029a-28b8-4e6f-8c27-f80759be0f97 service nova] Acquiring lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.940312] env[62109]: DEBUG oslo_concurrency.lockutils [req-c5bc5506-731a-47a5-b220-5f992a794333 req-25b1029a-28b8-4e6f-8c27-f80759be0f97 service nova] Lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.940473] env[62109]: DEBUG 
oslo_concurrency.lockutils [req-c5bc5506-731a-47a5-b220-5f992a794333 req-25b1029a-28b8-4e6f-8c27-f80759be0f97 service nova] Lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.940641] env[62109]: DEBUG nova.compute.manager [req-c5bc5506-731a-47a5-b220-5f992a794333 req-25b1029a-28b8-4e6f-8c27-f80759be0f97 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] No waiting events found dispatching network-vif-plugged-e7859833-4b92-4db6-9be7-2aa396869294 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 964.940810] env[62109]: WARNING nova.compute.manager [req-c5bc5506-731a-47a5-b220-5f992a794333 req-25b1029a-28b8-4e6f-8c27-f80759be0f97 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Received unexpected event network-vif-plugged-e7859833-4b92-4db6-9be7-2aa396869294 for instance with vm_state building and task_state spawning. [ 965.018603] env[62109]: DEBUG nova.compute.manager [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Received event network-vif-plugged-5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 965.018838] env[62109]: DEBUG oslo_concurrency.lockutils [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] Acquiring lock "35a13db2-f645-4634-86e0-7e9a6a24fc66-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.019062] env[62109]: DEBUG oslo_concurrency.lockutils [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] Lock "35a13db2-f645-4634-86e0-7e9a6a24fc66-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.019299] env[62109]: DEBUG oslo_concurrency.lockutils [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] Lock "35a13db2-f645-4634-86e0-7e9a6a24fc66-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.019497] env[62109]: DEBUG nova.compute.manager [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] No waiting events found dispatching network-vif-plugged-5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 965.019667] env[62109]: WARNING nova.compute.manager [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Received unexpected event network-vif-plugged-5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3 for instance with vm_state building and task_state spawning. 
[ 965.019829] env[62109]: DEBUG nova.compute.manager [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Received event network-changed-5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 965.019985] env[62109]: DEBUG nova.compute.manager [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Refreshing instance network info cache due to event network-changed-5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 965.020612] env[62109]: DEBUG oslo_concurrency.lockutils [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] Acquiring lock "refresh_cache-35a13db2-f645-4634-86e0-7e9a6a24fc66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.020760] env[62109]: DEBUG oslo_concurrency.lockutils [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] Acquired lock "refresh_cache-35a13db2-f645-4634-86e0-7e9a6a24fc66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.020964] env[62109]: DEBUG nova.network.neutron [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Refreshing network info cache for port 5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 965.193154] env[62109]: DEBUG oslo_concurrency.lockutils [None req-84da6941-7286-4645-bc98-a0385aaf5fd0 tempest-SecurityGroupsTestJSON-1620090374 tempest-SecurityGroupsTestJSON-1620090374-project-member] Lock "7ace6356-1a81-4095-8286-c9b6d829062b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.616s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.213695] env[62109]: INFO nova.scheduler.client.report [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleted allocation for migration a1545213-e534-441e-8d4e-0d5c5aa97374 [ 965.319954] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "refresh_cache-35a13db2-f645-4634-86e0-7e9a6a24fc66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.415965] env[62109]: DEBUG nova.network.neutron [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Successfully updated port: e7859833-4b92-4db6-9be7-2aa396869294 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 965.576366] env[62109]: DEBUG nova.network.neutron [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 965.719340] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c246824a-003d-4824-b3ac-d78b0c9da418 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 26.568s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.738546] env[62109]: DEBUG nova.network.neutron [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.919793] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "refresh_cache-50c93e9e-5af6-489e-ac8a-29b8a6777a4e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.919873] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquired lock "refresh_cache-50c93e9e-5af6-489e-ac8a-29b8a6777a4e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.920126] env[62109]: DEBUG nova.network.neutron [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 965.929263] env[62109]: DEBUG nova.objects.instance [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'flavor' on Instance uuid 8b63f9a1-5639-48b2-b0a9-30380835bef2 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.973688] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5086685c-7dd3-43ef-ad99-2e5ec0939492 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.981970] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef37779-70b8-4c88-9ed2-905ef9ddc102 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.013209] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee4df08-b4ca-4880-8632-64e692b8f2b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.023019] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbbeed5-dda2-47b5-93cd-9643a63c9095 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.034302] env[62109]: DEBUG nova.compute.provider_tree [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b 
tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.246044] env[62109]: DEBUG oslo_concurrency.lockutils [req-38488da5-41fa-4746-8799-544109029f0f req-bfed83af-08ac-4fda-9476-43d9fdcb006c service nova] Releasing lock "refresh_cache-35a13db2-f645-4634-86e0-7e9a6a24fc66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.246044] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "refresh_cache-35a13db2-f645-4634-86e0-7e9a6a24fc66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.246044] env[62109]: DEBUG nova.network.neutron [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 966.360358] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Volume attach. Driver type: vmdk {{(pid=62109) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 966.360613] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244500', 'volume_id': 'c2c1fe43-1dae-4c41-a564-3d09f609743e', 'name': 'volume-c2c1fe43-1dae-4c41-a564-3d09f609743e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '39c17e34-c8c0-4a66-8d22-717efcb984bc', 'attached_at': '', 'detached_at': '', 'volume_id': 'c2c1fe43-1dae-4c41-a564-3d09f609743e', 'serial': 'c2c1fe43-1dae-4c41-a564-3d09f609743e'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 966.362174] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f10572e-5826-466d-946e-4578c4d08801 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.378399] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c530297-bbc1-4a92-8276-87a9d1b81035 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.410589] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 
volume-c2c1fe43-1dae-4c41-a564-3d09f609743e/volume-c2c1fe43-1dae-4c41-a564-3d09f609743e.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 966.411573] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd5006a4-e4ec-4d7e-9ec5-2f5ed9ecee52 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.433550] env[62109]: DEBUG oslo_concurrency.lockutils [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.433729] env[62109]: DEBUG oslo_concurrency.lockutils [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.433902] env[62109]: DEBUG nova.network.neutron [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 966.434332] env[62109]: DEBUG nova.objects.instance [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'info_cache' on Instance uuid 8b63f9a1-5639-48b2-b0a9-30380835bef2 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 966.437853] env[62109]: DEBUG oslo_vmware.api [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 966.437853] env[62109]: value = "task-1116873" [ 966.437853] env[62109]: _type = "Task" [ 966.437853] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.446823] env[62109]: DEBUG oslo_vmware.api [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116873, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.493683] env[62109]: DEBUG nova.network.neutron [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 966.537275] env[62109]: DEBUG nova.scheduler.client.report [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 966.789148] env[62109]: DEBUG nova.network.neutron [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 966.886367] env[62109]: DEBUG nova.network.neutron [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Updating instance_info_cache with network_info: [{"id": "a9a12ab6-6933-4a0d-969e-48319fbc9121", "address": "fa:16:3e:7b:a5:4c", "network": {"id": "967a429e-c150-4f53-95e3-48d3da41d32b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-726196408", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9a12ab6-69", "ovs_interfaceid": "a9a12ab6-6933-4a0d-969e-48319fbc9121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e7859833-4b92-4db6-9be7-2aa396869294", "address": "fa:16:3e:c2:ed:7e", "network": {"id": "8738fd16-3c18-495e-9d3a-3cb64f18bad0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1028255606", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 
710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7859833-4b", "ovs_interfaceid": "e7859833-4b92-4db6-9be7-2aa396869294", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.937815] env[62109]: DEBUG nova.objects.base [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Object Instance<8b63f9a1-5639-48b2-b0a9-30380835bef2> lazy-loaded attributes: flavor,info_cache {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 966.951945] env[62109]: DEBUG oslo_vmware.api [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116873, 'name': ReconfigVM_Task, 'duration_secs': 0.338396} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.952956] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Reconfigured VM instance instance-0000004f to attach disk [datastore1] volume-c2c1fe43-1dae-4c41-a564-3d09f609743e/volume-c2c1fe43-1dae-4c41-a564-3d09f609743e.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.960544] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75c9c031-fcd0-42c1-80b7-f3cc4d8bf963 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.978387] env[62109]: DEBUG nova.compute.manager [req-6a2ecea5-b2eb-4a62-913b-e16cf546c962 req-1f88894a-3a26-404c-98e3-6197d44fcdf7 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Received event network-changed-e7859833-4b92-4db6-9be7-2aa396869294 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 966.978763] env[62109]: DEBUG nova.compute.manager [req-6a2ecea5-b2eb-4a62-913b-e16cf546c962 req-1f88894a-3a26-404c-98e3-6197d44fcdf7 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Refreshing instance network info cache due to event network-changed-e7859833-4b92-4db6-9be7-2aa396869294. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 966.978763] env[62109]: DEBUG oslo_concurrency.lockutils [req-6a2ecea5-b2eb-4a62-913b-e16cf546c962 req-1f88894a-3a26-404c-98e3-6197d44fcdf7 service nova] Acquiring lock "refresh_cache-50c93e9e-5af6-489e-ac8a-29b8a6777a4e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.984801] env[62109]: DEBUG oslo_vmware.api [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 966.984801] env[62109]: value = "task-1116874" [ 966.984801] env[62109]: _type = "Task" [ 966.984801] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.993666] env[62109]: DEBUG oslo_vmware.api [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116874, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.020192] env[62109]: DEBUG nova.network.neutron [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Updating instance_info_cache with network_info: [{"id": "5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3", "address": "fa:16:3e:d9:18:a5", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5aec6f1b-0d", "ovs_interfaceid": "5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.043296] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.043810] env[62109]: DEBUG nova.compute.manager [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 967.046304] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.721s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.047706] env[62109]: INFO nova.compute.claims [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 967.389654] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Releasing lock "refresh_cache-50c93e9e-5af6-489e-ac8a-29b8a6777a4e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.390103] env[62109]: DEBUG nova.compute.manager [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Instance network_info: |[{"id": "a9a12ab6-6933-4a0d-969e-48319fbc9121", "address": "fa:16:3e:7b:a5:4c", "network": {"id": "967a429e-c150-4f53-95e3-48d3da41d32b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-726196408", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9a12ab6-69", "ovs_interfaceid": "a9a12ab6-6933-4a0d-969e-48319fbc9121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e7859833-4b92-4db6-9be7-2aa396869294", "address": "fa:16:3e:c2:ed:7e", "network": {"id": "8738fd16-3c18-495e-9d3a-3cb64f18bad0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1028255606", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tape7859833-4b", "ovs_interfaceid": "e7859833-4b92-4db6-9be7-2aa396869294", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 967.390465] env[62109]: DEBUG oslo_concurrency.lockutils [req-6a2ecea5-b2eb-4a62-913b-e16cf546c962 req-1f88894a-3a26-404c-98e3-6197d44fcdf7 service nova] Acquired lock "refresh_cache-50c93e9e-5af6-489e-ac8a-29b8a6777a4e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.390667] env[62109]: DEBUG nova.network.neutron [req-6a2ecea5-b2eb-4a62-913b-e16cf546c962 req-1f88894a-3a26-404c-98e3-6197d44fcdf7 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Refreshing network info cache for port e7859833-4b92-4db6-9be7-2aa396869294 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 967.392036] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:a5:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9a12ab6-6933-4a0d-969e-48319fbc9121', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:ed:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a407774d-9c2a-411d-9d6f-9ca733b97f3f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e7859833-4b92-4db6-9be7-2aa396869294', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 967.402052] env[62109]: DEBUG oslo.service.loopingcall [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 967.405033] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 967.405552] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7723970-c9ea-4bad-9d04-20c83984b7b6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.429067] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 967.429067] env[62109]: value = "task-1116875" [ 967.429067] env[62109]: _type = "Task" [ 967.429067] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.437461] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116875, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.500356] env[62109]: DEBUG oslo_vmware.api [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116874, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.524302] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "refresh_cache-35a13db2-f645-4634-86e0-7e9a6a24fc66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.524638] env[62109]: DEBUG nova.compute.manager [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Instance network_info: |[{"id": "5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3", "address": "fa:16:3e:d9:18:a5", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5aec6f1b-0d", "ovs_interfaceid": "5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 967.525088] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:18:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7cd4cea-788c-4e6d-9df8-5d83838e2e2a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 967.532946] env[62109]: DEBUG oslo.service.loopingcall [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 967.535297] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 967.535542] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d683b84-3379-4b2a-9b40-19b56f0eb6d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.552976] env[62109]: DEBUG nova.compute.utils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 967.556516] env[62109]: DEBUG nova.compute.manager [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 967.556689] env[62109]: DEBUG nova.network.neutron [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 967.561021] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 967.561021] env[62109]: value = "task-1116876" [ 967.561021] env[62109]: _type = "Task" [ 967.561021] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.570854] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116876, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.621399] env[62109]: DEBUG nova.policy [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd20b2aa2aa4419597ddca6f95ced41f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '093c284d31de414cb583d501864456c8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 967.749647] env[62109]: DEBUG nova.network.neutron [req-6a2ecea5-b2eb-4a62-913b-e16cf546c962 req-1f88894a-3a26-404c-98e3-6197d44fcdf7 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Updated VIF entry in instance network info cache for port e7859833-4b92-4db6-9be7-2aa396869294. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 967.750099] env[62109]: DEBUG nova.network.neutron [req-6a2ecea5-b2eb-4a62-913b-e16cf546c962 req-1f88894a-3a26-404c-98e3-6197d44fcdf7 service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Updating instance_info_cache with network_info: [{"id": "a9a12ab6-6933-4a0d-969e-48319fbc9121", "address": "fa:16:3e:7b:a5:4c", "network": {"id": "967a429e-c150-4f53-95e3-48d3da41d32b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-726196408", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9a12ab6-69", "ovs_interfaceid": "a9a12ab6-6933-4a0d-969e-48319fbc9121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e7859833-4b92-4db6-9be7-2aa396869294", "address": "fa:16:3e:c2:ed:7e", "network": {"id": "8738fd16-3c18-495e-9d3a-3cb64f18bad0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1028255606", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d47712667550407d8846659ec113017b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7859833-4b", "ovs_interfaceid": "e7859833-4b92-4db6-9be7-2aa396869294", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.846151] env[62109]: DEBUG nova.network.neutron [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance_info_cache with network_info: [{"id": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "address": "fa:16:3e:83:01:bf", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": 
"10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4891b0-c5", "ovs_interfaceid": "7c4891b0-c525-4571-aa3b-47cc9a42d8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.946860] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116875, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.954982] env[62109]: DEBUG nova.network.neutron [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Successfully created port: bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 968.000042] env[62109]: DEBUG oslo_vmware.api [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116874, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.057295] env[62109]: DEBUG nova.compute.manager [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 968.072176] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116876, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.253991] env[62109]: DEBUG oslo_concurrency.lockutils [req-6a2ecea5-b2eb-4a62-913b-e16cf546c962 req-1f88894a-3a26-404c-98e3-6197d44fcdf7 service nova] Releasing lock "refresh_cache-50c93e9e-5af6-489e-ac8a-29b8a6777a4e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.350355] env[62109]: DEBUG oslo_concurrency.lockutils [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "refresh_cache-8b63f9a1-5639-48b2-b0a9-30380835bef2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.373889] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690f14a7-e341-441d-a0c9-6ba29c135a98 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.384211] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b23a4b-76ca-4161-ad45-c5e8cb179530 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.417857] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25584ac5-0e4b-4790-a34d-1cb2e8bd0f3f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.426122] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a02c87-4495-41d4-a2a0-cc114dbd7917 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.445420] env[62109]: DEBUG nova.compute.provider_tree [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.451009] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116875, 'name': CreateVM_Task, 'duration_secs': 0.630082} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.451459] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 968.452491] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.452892] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.453132] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 968.453424] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f81924b-83d1-4057-98b0-b3fec1aa7319 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.459167] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 968.459167] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b6fdb0-bb7b-7d89-4e0f-48f91a822915" [ 968.459167] env[62109]: _type = "Task" [ 968.459167] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.468453] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b6fdb0-bb7b-7d89-4e0f-48f91a822915, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.496420] env[62109]: DEBUG oslo_vmware.api [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116874, 'name': ReconfigVM_Task, 'duration_secs': 1.163714} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.496420] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244500', 'volume_id': 'c2c1fe43-1dae-4c41-a564-3d09f609743e', 'name': 'volume-c2c1fe43-1dae-4c41-a564-3d09f609743e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '39c17e34-c8c0-4a66-8d22-717efcb984bc', 'attached_at': '', 'detached_at': '', 'volume_id': 'c2c1fe43-1dae-4c41-a564-3d09f609743e', 'serial': 'c2c1fe43-1dae-4c41-a564-3d09f609743e'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 968.581929] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116876, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.855152] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 968.855478] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0dad56a2-363a-47b9-972e-889f72973a77 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.865743] env[62109]: DEBUG oslo_vmware.api [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 968.865743] env[62109]: value = "task-1116877" [ 968.865743] env[62109]: _type = "Task" [ 968.865743] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.874081] env[62109]: DEBUG oslo_vmware.api [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116877, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.952537] env[62109]: DEBUG nova.scheduler.client.report [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 968.969711] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b6fdb0-bb7b-7d89-4e0f-48f91a822915, 'name': SearchDatastore_Task, 'duration_secs': 0.011131} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.970625] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.970872] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 968.971206] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.971303] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.971501] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 968.972017] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c92921c4-9d00-4b77-bbcd-c6a8a8b7f317 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.982916] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 968.983135] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 968.983866] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2467a010-775d-45bc-9dc8-a0e31963c6ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.989290] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 968.989290] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a8b5cb-d106-64a6-9803-af0c045d2370" [ 968.989290] env[62109]: _type = "Task" [ 968.989290] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.996786] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a8b5cb-d106-64a6-9803-af0c045d2370, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.075226] env[62109]: DEBUG nova.compute.manager [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 969.077236] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116876, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.104612] env[62109]: DEBUG nova.virt.hardware [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 969.104897] env[62109]: DEBUG nova.virt.hardware [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 969.105081] env[62109]: DEBUG nova.virt.hardware [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.105276] env[62109]: DEBUG nova.virt.hardware [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 969.105433] env[62109]: DEBUG nova.virt.hardware [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.105584] env[62109]: DEBUG nova.virt.hardware [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 969.105805] env[62109]: DEBUG nova.virt.hardware [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 969.105975] env[62109]: DEBUG nova.virt.hardware [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 969.106304] env[62109]: DEBUG 
nova.virt.hardware [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 969.106495] env[62109]: DEBUG nova.virt.hardware [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 969.106703] env[62109]: DEBUG nova.virt.hardware [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 969.107677] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54ee5fb-6f19-400e-bb4c-b1772db3e792 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.115637] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e90db70-ad9a-4429-ae45-501aaad969d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.260464] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a09c8aba-cc46-47b7-a0f8-1c0847d28ea6 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.260837] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a09c8aba-cc46-47b7-a0f8-1c0847d28ea6 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.261056] env[62109]: DEBUG nova.compute.manager [None req-a09c8aba-cc46-47b7-a0f8-1c0847d28ea6 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 969.261935] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a97ce2f-0f4d-424e-8ab4-15f7a3d0e19c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.268625] env[62109]: DEBUG nova.compute.manager [None req-a09c8aba-cc46-47b7-a0f8-1c0847d28ea6 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62109) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 969.269217] env[62109]: DEBUG nova.objects.instance [None 
req-a09c8aba-cc46-47b7-a0f8-1c0847d28ea6 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lazy-loading 'flavor' on Instance uuid 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.376082] env[62109]: DEBUG oslo_vmware.api [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116877, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.457807] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.458484] env[62109]: DEBUG nova.compute.manager [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 969.461312] env[62109]: DEBUG oslo_concurrency.lockutils [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.920s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.461566] env[62109]: DEBUG nova.objects.instance [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lazy-loading 'resources' on Instance uuid 66bbe1e6-e5ee-46a0-b95c-449eef636509 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.500546] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a8b5cb-d106-64a6-9803-af0c045d2370, 'name': SearchDatastore_Task, 'duration_secs': 0.011866} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.501950] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36b81d3e-93e7-4eb4-bae4-bdf879fe9c96 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.507680] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 969.507680] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e603d1-f22f-5e84-a03c-ab3d6addb8d7" [ 969.507680] env[62109]: _type = "Task" [ 969.507680] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.515755] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e603d1-f22f-5e84-a03c-ab3d6addb8d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.546331] env[62109]: DEBUG nova.objects.instance [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lazy-loading 'flavor' on Instance uuid 39c17e34-c8c0-4a66-8d22-717efcb984bc {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.573205] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116876, 'name': CreateVM_Task, 'duration_secs': 2.002944} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.573376] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 969.574071] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.574245] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.574579] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 969.574825] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95e972c0-881e-447a-b8ac-b5e860f9a2c5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.579765] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 969.579765] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5240d381-b85a-77c1-2b59-15126f261a58" [ 969.579765] env[62109]: _type = "Task" [ 969.579765] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.588205] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5240d381-b85a-77c1-2b59-15126f261a58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.777645] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09c8aba-cc46-47b7-a0f8-1c0847d28ea6 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 969.777938] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f82d0d98-4bfa-4649-96fb-b692098b3ba9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.785709] env[62109]: DEBUG oslo_vmware.api [None req-a09c8aba-cc46-47b7-a0f8-1c0847d28ea6 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 969.785709] env[62109]: value = "task-1116878" [ 969.785709] env[62109]: _type = "Task" [ 969.785709] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.795275] env[62109]: DEBUG oslo_vmware.api [None req-a09c8aba-cc46-47b7-a0f8-1c0847d28ea6 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116878, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.876306] env[62109]: DEBUG oslo_vmware.api [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116877, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.918015] env[62109]: DEBUG nova.compute.manager [req-7e6c2e00-b0a6-417b-bbc7-4d73400ec15b req-22d50cce-b37f-479a-9411-b48f1c4f19ec service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Received event network-vif-plugged-bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 969.918235] env[62109]: DEBUG oslo_concurrency.lockutils [req-7e6c2e00-b0a6-417b-bbc7-4d73400ec15b req-22d50cce-b37f-479a-9411-b48f1c4f19ec service nova] Acquiring lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.918448] env[62109]: DEBUG oslo_concurrency.lockutils [req-7e6c2e00-b0a6-417b-bbc7-4d73400ec15b req-22d50cce-b37f-479a-9411-b48f1c4f19ec service nova] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.918623] env[62109]: DEBUG oslo_concurrency.lockutils [req-7e6c2e00-b0a6-417b-bbc7-4d73400ec15b req-22d50cce-b37f-479a-9411-b48f1c4f19ec service nova] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.918844] env[62109]: DEBUG nova.compute.manager [req-7e6c2e00-b0a6-417b-bbc7-4d73400ec15b req-22d50cce-b37f-479a-9411-b48f1c4f19ec service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] No waiting events found dispatching network-vif-plugged-bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 969.919086] env[62109]: WARNING nova.compute.manager [req-7e6c2e00-b0a6-417b-bbc7-4d73400ec15b req-22d50cce-b37f-479a-9411-b48f1c4f19ec service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Received unexpected event network-vif-plugged-bea956d1-ceb2-4342-bef6-6a37fef7ec4f for instance with vm_state building and task_state spawning. [ 969.965017] env[62109]: DEBUG nova.compute.utils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 969.969675] env[62109]: DEBUG nova.compute.manager [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 969.969869] env[62109]: DEBUG nova.network.neutron [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 970.008792] env[62109]: DEBUG nova.network.neutron [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Successfully updated port: bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 970.022251] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e603d1-f22f-5e84-a03c-ab3d6addb8d7, 'name': SearchDatastore_Task, 'duration_secs': 0.009407} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.025089] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.025397] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 50c93e9e-5af6-489e-ac8a-29b8a6777a4e/50c93e9e-5af6-489e-ac8a-29b8a6777a4e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 970.026452] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9af25727-04ac-48ec-ad83-29fca189eaea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.033678] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 970.033678] env[62109]: value = "task-1116879" [ 970.033678] env[62109]: _type = "Task" [ 970.033678] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.042031] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116879, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.052579] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0b57759-c1c4-4f13-993f-5c87c49b7481 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.299s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.093222] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5240d381-b85a-77c1-2b59-15126f261a58, 'name': SearchDatastore_Task, 'duration_secs': 0.009325} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.097109] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.097572] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 970.097862] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.098621] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.098967] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 970.099376] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd59fce9-9002-4c75-9cb2-caa825d1f475 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.117443] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 970.117646] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 970.118393] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a25ccef-f698-4c96-a5bd-a773607d5d8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.123430] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 970.123430] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52818101-9e13-d632-2b0e-ebf9d68a9f4e" [ 970.123430] env[62109]: _type = "Task" [ 970.123430] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.129970] env[62109]: DEBUG nova.policy [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e608055854844801b9f7c51d07820917', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ca12aa68e4b4d4d8cf1e3332deb44f4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 970.136598] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52818101-9e13-d632-2b0e-ebf9d68a9f4e, 'name': SearchDatastore_Task, 'duration_secs': 0.009095} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.137414] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c6475c3-1b0c-4b9d-9c34-893afbe3095b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.143170] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 970.143170] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f6ed34-7c7f-e499-04cc-df1147294863" [ 970.143170] env[62109]: _type = "Task" [ 970.143170] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.156269] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f6ed34-7c7f-e499-04cc-df1147294863, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.213737] env[62109]: INFO nova.compute.manager [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Rescuing [ 970.214771] env[62109]: DEBUG oslo_concurrency.lockutils [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.214960] env[62109]: DEBUG oslo_concurrency.lockutils [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.215765] env[62109]: DEBUG nova.network.neutron [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 970.292983] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c2a3fa-02fe-4f46-87b9-3aec5814e578 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.299502] env[62109]: DEBUG oslo_vmware.api [None req-a09c8aba-cc46-47b7-a0f8-1c0847d28ea6 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116878, 'name': PowerOffVM_Task, 'duration_secs': 0.399154} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.300321] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09c8aba-cc46-47b7-a0f8-1c0847d28ea6 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 970.300527] env[62109]: DEBUG nova.compute.manager [None req-a09c8aba-cc46-47b7-a0f8-1c0847d28ea6 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 970.301265] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab0c31a-57a0-4e75-ba4a-d323bf883819 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.309421] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42416dc9-707b-468e-af53-b1076f602f30 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.343491] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50558c7d-d793-444c-9852-0967d89c0866 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.354679] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d60d37-9dfa-48a6-91bd-128c4a10b86b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.369707] env[62109]: DEBUG nova.compute.provider_tree [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.381538] env[62109]: DEBUG oslo_vmware.api [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116877, 'name': PowerOnVM_Task, 'duration_secs': 1.02063} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.383402] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 970.383402] env[62109]: DEBUG nova.compute.manager [None req-57a39e2c-7110-4a44-ad4e-1771b85cace6 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 970.383828] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6de8a8-ea23-4d32-8ef6-05a4b0d619d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.470391] env[62109]: DEBUG nova.compute.manager [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 970.514904] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.515072] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.515248] env[62109]: DEBUG nova.network.neutron [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 970.545559] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116879, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.579812] env[62109]: DEBUG nova.network.neutron [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Successfully created port: e5781b49-4005-4203-8390-dc6af21b6eda {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 970.657710] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f6ed34-7c7f-e499-04cc-df1147294863, 'name': SearchDatastore_Task, 'duration_secs': 0.011191} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.658019] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.658295] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 35a13db2-f645-4634-86e0-7e9a6a24fc66/35a13db2-f645-4634-86e0-7e9a6a24fc66.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 970.658641] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95f73c53-64e6-4237-83c6-45bd49ac9fbf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.668309] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 970.668309] env[62109]: value = "task-1116880" [ 970.668309] env[62109]: _type = "Task" [ 970.668309] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.677239] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116880, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.818607] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a09c8aba-cc46-47b7-a0f8-1c0847d28ea6 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.558s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.874934] env[62109]: DEBUG nova.scheduler.client.report [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 970.954492] env[62109]: DEBUG nova.network.neutron [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updating instance_info_cache with network_info: [{"id": "63549817-3bd1-441c-af9c-739682b35cf2", "address": "fa:16:3e:39:dd:c1", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63549817-3b", "ovs_interfaceid": "63549817-3bd1-441c-af9c-739682b35cf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.046010] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116879, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.818565} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.046597] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 50c93e9e-5af6-489e-ac8a-29b8a6777a4e/50c93e9e-5af6-489e-ac8a-29b8a6777a4e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 971.046823] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 971.047111] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9353db25-d78f-4066-8d96-a5c7ea49914b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.050823] env[62109]: DEBUG nova.network.neutron [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 971.055620] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 971.055620] env[62109]: value = "task-1116881" [ 971.055620] env[62109]: _type = "Task" [ 971.055620] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.066354] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116881, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.178776] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116880, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.211131] env[62109]: DEBUG nova.network.neutron [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Updating instance_info_cache with network_info: [{"id": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "address": "fa:16:3e:68:0e:6e", "network": {"id": "feb45222-861d-4499-8c29-03176662f1ef", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-854878036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "093c284d31de414cb583d501864456c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbea956d1-ce", "ovs_interfaceid": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.380117] env[62109]: DEBUG oslo_concurrency.lockutils [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.919s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.382487] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 15.165s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.382782] env[62109]: DEBUG nova.objects.instance [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 971.409090] env[62109]: INFO nova.scheduler.client.report [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted allocations for instance 66bbe1e6-e5ee-46a0-b95c-449eef636509 [ 971.456884] env[62109]: DEBUG oslo_concurrency.lockutils [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.479917] env[62109]: DEBUG nova.compute.manager [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 971.523080] env[62109]: DEBUG nova.virt.hardware [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 971.523080] env[62109]: DEBUG nova.virt.hardware [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 971.523404] env[62109]: DEBUG nova.virt.hardware [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.523448] env[62109]: DEBUG nova.virt.hardware [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 971.525024] env[62109]: DEBUG nova.virt.hardware [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.525024] env[62109]: DEBUG nova.virt.hardware [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 971.525024] env[62109]: DEBUG nova.virt.hardware [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 971.525024] env[62109]: DEBUG nova.virt.hardware [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 971.525024] env[62109]: DEBUG nova.virt.hardware [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 971.525024] env[62109]: DEBUG nova.virt.hardware [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 971.525024] env[62109]: DEBUG nova.virt.hardware [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 971.526466] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e8c7f1-cbbf-448b-adce-14c5efb52270 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.538194] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee71b82-2ced-402e-bd79-4c3de61ec821 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.565015] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116881, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099991} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.565364] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 971.566202] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bead05fa-14e5-463b-8553-304c75056e11 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.595114] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 50c93e9e-5af6-489e-ac8a-29b8a6777a4e/50c93e9e-5af6-489e-ac8a-29b8a6777a4e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 971.595459] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a97f1ba0-ce75-402d-9071-6ca775058a7a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.618118] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 971.618118] env[62109]: value = "task-1116882" [ 971.618118] env[62109]: _type = "Task" [ 971.618118] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.632578] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116882, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.679998] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116880, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.643328} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.680382] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 35a13db2-f645-4634-86e0-7e9a6a24fc66/35a13db2-f645-4634-86e0-7e9a6a24fc66.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 971.680708] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 971.681051] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c8a2deaf-20ab-4dd9-bfa8-91baf6a3c329 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.424790] env[62109]: DEBUG nova.network.neutron [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Successfully updated port: e5781b49-4005-4203-8390-dc6af21b6eda {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 972.429143] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Releasing lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.429716] env[62109]: DEBUG nova.compute.manager [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Instance network_info: |[{"id": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "address": "fa:16:3e:68:0e:6e", "network": {"id": "feb45222-861d-4499-8c29-03176662f1ef", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-854878036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "093c284d31de414cb583d501864456c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbea956d1-ce", "ovs_interfaceid": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 972.435326] env[62109]: DEBUG nova.objects.instance [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lazy-loading 'flavor' on Instance uuid 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.444069] env[62109]: DEBUG nova.compute.manager [req-e0bcd9ab-7f1b-45d3-b874-b50c46b18f15 req-fe228ce8-da1c-4a6d-b1f0-8a756995f63e service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Received event network-changed-bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 972.444069] env[62109]: DEBUG nova.compute.manager [req-e0bcd9ab-7f1b-45d3-b874-b50c46b18f15 req-fe228ce8-da1c-4a6d-b1f0-8a756995f63e service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Refreshing instance network info cache due to event network-changed-bea956d1-ceb2-4342-bef6-6a37fef7ec4f. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 972.444176] env[62109]: DEBUG oslo_concurrency.lockutils [req-e0bcd9ab-7f1b-45d3-b874-b50c46b18f15 req-fe228ce8-da1c-4a6d-b1f0-8a756995f63e service nova] Acquiring lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.444291] env[62109]: DEBUG oslo_concurrency.lockutils [req-e0bcd9ab-7f1b-45d3-b874-b50c46b18f15 req-fe228ce8-da1c-4a6d-b1f0-8a756995f63e service nova] Acquired lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.444776] env[62109]: DEBUG nova.network.neutron [req-e0bcd9ab-7f1b-45d3-b874-b50c46b18f15 req-fe228ce8-da1c-4a6d-b1f0-8a756995f63e service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Refreshing network info cache for port bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 972.446468] env[62109]: DEBUG nova.compute.manager [req-3579ac9d-689c-4402-b6ba-efd7cfd3c8da req-9e1bd462-273c-47e8-89ef-b973e8fa5570 service nova] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Received event network-vif-plugged-e5781b49-4005-4203-8390-dc6af21b6eda {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 972.446672] env[62109]: DEBUG oslo_concurrency.lockutils [req-3579ac9d-689c-4402-b6ba-efd7cfd3c8da req-9e1bd462-273c-47e8-89ef-b973e8fa5570 service nova] Acquiring lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.446881] env[62109]: DEBUG oslo_concurrency.lockutils [req-3579ac9d-689c-4402-b6ba-efd7cfd3c8da req-9e1bd462-273c-47e8-89ef-b973e8fa5570 service nova] Lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.447073] env[62109]: DEBUG oslo_concurrency.lockutils [req-3579ac9d-689c-4402-b6ba-efd7cfd3c8da req-9e1bd462-273c-47e8-89ef-b973e8fa5570 service nova] Lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.447273] env[62109]: DEBUG nova.compute.manager [req-3579ac9d-689c-4402-b6ba-efd7cfd3c8da req-9e1bd462-273c-47e8-89ef-b973e8fa5570 service nova] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] No waiting events found dispatching network-vif-plugged-e5781b49-4005-4203-8390-dc6af21b6eda {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 972.449068] env[62109]: WARNING nova.compute.manager [req-3579ac9d-689c-4402-b6ba-efd7cfd3c8da req-9e1bd462-273c-47e8-89ef-b973e8fa5570 service nova] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Received unexpected event network-vif-plugged-e5781b49-4005-4203-8390-dc6af21b6eda for instance with vm_state building and task_state spawning. [ 972.449217] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 972.449675] env[62109]: DEBUG oslo_concurrency.lockutils [None req-abc4ae16-8260-4e47-a024-7ac8d7f6ac53 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "66bbe1e6-e5ee-46a0-b95c-449eef636509" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.785s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.452052] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:0e:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4349e30-c086-4c24-9e0e-83996d808a1b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bea956d1-ceb2-4342-bef6-6a37fef7ec4f', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 972.460151] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Creating folder: Project (093c284d31de414cb583d501864456c8). Parent ref: group-v244329. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 972.466009] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-edc7e900-ca28-4189-b80c-4b2d394fa8af {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.472467] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06c2537e-cdc8-4d8b-a290-79fd14d1c867 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.474606] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 972.474606] env[62109]: value = "task-1116883" [ 972.474606] env[62109]: _type = "Task" [ 972.474606] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.482439] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116882, 'name': ReconfigVM_Task, 'duration_secs': 0.289178} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.484223] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 50c93e9e-5af6-489e-ac8a-29b8a6777a4e/50c93e9e-5af6-489e-ac8a-29b8a6777a4e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 972.484970] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 972.484970] env[62109]: value = "task-1116884" [ 972.484970] env[62109]: _type = "Task" [ 972.484970] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.486335] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01954fb7-378f-481f-87f9-bbc46f359310 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.487857] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Created folder: Project (093c284d31de414cb583d501864456c8) in parent group-v244329. [ 972.488060] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Creating folder: Instances. Parent ref: group-v244503. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 972.494149] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e65cf88-0720-453c-8e3b-7870e3c0d56f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.495881] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116883, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.501958] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116884, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.503205] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 972.503205] env[62109]: value = "task-1116886" [ 972.503205] env[62109]: _type = "Task" [ 972.503205] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.507403] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Created folder: Instances in parent group-v244503. [ 972.507663] env[62109]: DEBUG oslo.service.loopingcall [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 972.508212] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 972.508439] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e6ddbb05-242f-4d19-9ea3-d7f4a12d7d30 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.526475] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116886, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.532730] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 972.532730] env[62109]: value = "task-1116888" [ 972.532730] env[62109]: _type = "Task" [ 972.532730] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.543949] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116888, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.927738] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.928258] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.928258] env[62109]: DEBUG nova.network.neutron [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 972.939939] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c3ab2567-7578-4417-bfe1-a9ac75a1b0d8 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.557s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.947215] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.129s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.947670] env[62109]: DEBUG nova.objects.instance [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Lazy-loading 'resources' on Instance uuid e7e232c4-a2cb-44eb-8ee3-11fc12ee152a {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.952597] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "8b63f9a1-5639-48b2-b0a9-30380835bef2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.952912] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.953162] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] 
Acquiring lock "8b63f9a1-5639-48b2-b0a9-30380835bef2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.953356] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.953563] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.957862] env[62109]: INFO nova.compute.manager [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Terminating instance [ 972.963141] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.963324] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.963502] env[62109]: DEBUG nova.network.neutron [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 972.963751] env[62109]: DEBUG nova.objects.instance [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lazy-loading 'info_cache' on Instance uuid 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.965338] env[62109]: DEBUG nova.compute.manager [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 972.965467] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 972.967109] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d3ecde-6eb7-46f4-b586-3cbf7d2dc1a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.975943] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 972.979147] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b96a5bc0-8363-4793-bce0-094cff640969 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.986425] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116883, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079468} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.987840] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 972.988214] env[62109]: DEBUG oslo_vmware.api [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 972.988214] env[62109]: value = "task-1116889" [ 972.988214] env[62109]: _type = "Task" [ 972.988214] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.989690] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ca6ab1-ea3c-4ae5-b1b9-c80b4f4f87f8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.035174] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 35a13db2-f645-4634-86e0-7e9a6a24fc66/35a13db2-f645-4634-86e0-7e9a6a24fc66.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 973.035518] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116884, 'name': PowerOffVM_Task, 'duration_secs': 0.471974} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.035779] env[62109]: DEBUG oslo_vmware.api [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116889, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.039097] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc074f93-d91e-4bfb-bd3c-75a50ec10a32 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.053188] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 973.060447] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b586b3-26b3-4bf5-a263-2eaeaadc2c6e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.063298] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116886, 'name': Rename_Task, 'duration_secs': 0.252371} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.066291] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 973.067215] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65a3413b-ff71-4475-9392-6845cccc1887 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.072317] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116888, 'name': CreateVM_Task, 'duration_secs': 0.436792} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.088786] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 973.089403] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 973.089403] env[62109]: value = "task-1116890" [ 973.089403] env[62109]: _type = "Task" [ 973.089403] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.093443] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.096519] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.096519] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 973.096519] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc174ae-1edd-4acd-ab63-0fed08b7e284 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.097614] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 973.097614] env[62109]: value = "task-1116891" [ 973.097614] env[62109]: _type = "Task" [ 973.097614] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.098235] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1698e3c7-93df-46d9-a25e-3c45de1376a4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.122566] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 973.122566] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52010f62-9aa0-b1d3-2d41-db9dd545e42e" [ 973.122566] env[62109]: _type = "Task" [ 973.122566] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.122566] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116890, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.122566] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116891, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.133461] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52010f62-9aa0-b1d3-2d41-db9dd545e42e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.153456] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 973.153741] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d172f65-4be1-43b5-8492-de727e2319df {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.163308] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 973.163308] env[62109]: value = "task-1116892" [ 973.163308] env[62109]: _type = "Task" [ 973.163308] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.175236] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 973.175498] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 973.175769] env[62109]: DEBUG oslo_concurrency.lockutils [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.175979] env[62109]: DEBUG oslo_concurrency.lockutils [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.176227] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 973.176511] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c8d3e36-9b27-47e3-b831-396e51174fae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.185481] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 973.185596] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 973.189072] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8db8855f-22aa-4594-980c-a6c4f2a3bbd9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.195240] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 973.195240] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522d729e-7cc5-c576-1a25-3920bb9a450c" [ 973.195240] env[62109]: _type = "Task" [ 973.195240] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.203801] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522d729e-7cc5-c576-1a25-3920bb9a450c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.330286] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9741fa2d-5691-4c9e-8c61-92cd664b1537 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.339661] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc1bc62-d151-4290-bd2a-4e84a1cebd8a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.369090] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fd2823-c29d-4b4b-bf3e-79860e19e072 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.378934] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb20deb-75df-4cf3-a468-31d1f6fc35a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.392561] env[62109]: DEBUG nova.compute.provider_tree [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 973.458693] env[62109]: DEBUG nova.network.neutron [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 973.471690] env[62109]: DEBUG nova.objects.base [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Object Instance<93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af> lazy-loaded attributes: flavor,info_cache {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 973.503755] env[62109]: DEBUG oslo_vmware.api [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116889, 'name': PowerOffVM_Task, 'duration_secs': 0.172219} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.506542] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 973.506837] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 973.507147] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0aa977b-4e5f-46ef-a5cd-965d3a30be7d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.554415] env[62109]: DEBUG nova.network.neutron [req-e0bcd9ab-7f1b-45d3-b874-b50c46b18f15 req-fe228ce8-da1c-4a6d-b1f0-8a756995f63e service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Updated VIF entry in instance network info cache for port bea956d1-ceb2-4342-bef6-6a37fef7ec4f. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 973.554831] env[62109]: DEBUG nova.network.neutron [req-e0bcd9ab-7f1b-45d3-b874-b50c46b18f15 req-fe228ce8-da1c-4a6d-b1f0-8a756995f63e service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Updating instance_info_cache with network_info: [{"id": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "address": "fa:16:3e:68:0e:6e", "network": {"id": "feb45222-861d-4499-8c29-03176662f1ef", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-854878036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "093c284d31de414cb583d501864456c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbea956d1-ce", "ovs_interfaceid": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.588291] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 973.589032] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 973.589032] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleting the datastore file [datastore1] 8b63f9a1-5639-48b2-b0a9-30380835bef2 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.589032] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e661fa2-e035-438e-b3f1-d6c7ffff70e5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.595894] env[62109]: DEBUG oslo_vmware.api [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 973.595894] env[62109]: value = "task-1116894" [ 973.595894] env[62109]: _type = "Task" [ 973.595894] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.610573] env[62109]: DEBUG oslo_vmware.api [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116894, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.614111] env[62109]: DEBUG nova.network.neutron [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance_info_cache with network_info: [{"id": "e5781b49-4005-4203-8390-dc6af21b6eda", "address": "fa:16:3e:73:53:db", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5781b49-40", "ovs_interfaceid": "e5781b49-4005-4203-8390-dc6af21b6eda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.618380] env[62109]: DEBUG oslo_vmware.api [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116891, 'name': PowerOnVM_Task, 'duration_secs': 0.461701} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.618380] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116890, 'name': ReconfigVM_Task, 'duration_secs': 0.269812} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.618616] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 973.618823] env[62109]: INFO nova.compute.manager [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Took 11.80 seconds to spawn the instance on the hypervisor. 
[ 973.619014] env[62109]: DEBUG nova.compute.manager [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 973.619314] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 35a13db2-f645-4634-86e0-7e9a6a24fc66/35a13db2-f645-4634-86e0-7e9a6a24fc66.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.620445] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba7aa57-a702-4507-96fb-83be7b92f264 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.622876] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3aee4eff-0062-4556-8afa-058031a383c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.638686] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52010f62-9aa0-b1d3-2d41-db9dd545e42e, 'name': SearchDatastore_Task, 'duration_secs': 0.019376} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.638983] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 973.638983] env[62109]: value = "task-1116895" [ 973.638983] env[62109]: _type = "Task" [ 973.638983] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.639498] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.639748] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 973.640061] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.648280] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116895, 'name': Rename_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.705382] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522d729e-7cc5-c576-1a25-3920bb9a450c, 'name': SearchDatastore_Task, 'duration_secs': 0.009924} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.706225] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ba9ce0e-ce5d-4012-a4b3-377eb4e31b69 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.711730] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 973.711730] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52132133-f21e-4477-f298-75523aadd56e" [ 973.711730] env[62109]: _type = "Task" [ 973.711730] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.719205] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52132133-f21e-4477-f298-75523aadd56e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.912108] env[62109]: ERROR nova.scheduler.client.report [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] [req-35bb0957-5541-4b70-a811-9def06d471cc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-35bb0957-5541-4b70-a811-9def06d471cc"}]} [ 973.930051] env[62109]: DEBUG nova.scheduler.client.report [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 973.943860] env[62109]: DEBUG nova.scheduler.client.report [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 973.944121] env[62109]: DEBUG nova.compute.provider_tree [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 973.955493] env[62109]: DEBUG nova.scheduler.client.report [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 973.973044] env[62109]: DEBUG nova.scheduler.client.report [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Refreshing 
trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 974.059051] env[62109]: DEBUG oslo_concurrency.lockutils [req-e0bcd9ab-7f1b-45d3-b874-b50c46b18f15 req-fe228ce8-da1c-4a6d-b1f0-8a756995f63e service nova] Releasing lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.105364] env[62109]: DEBUG oslo_vmware.api [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116894, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.120768] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.121077] env[62109]: DEBUG nova.compute.manager [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Instance network_info: |[{"id": "e5781b49-4005-4203-8390-dc6af21b6eda", "address": "fa:16:3e:73:53:db", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5781b49-40", "ovs_interfaceid": "e5781b49-4005-4203-8390-dc6af21b6eda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 974.121489] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:53:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6e940e5-e083-4238-973e-f1b4e2a3a5c7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e5781b49-4005-4203-8390-dc6af21b6eda', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 974.128857] env[62109]: DEBUG oslo.service.loopingcall [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 974.131436] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 974.131828] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78cddcc4-7a74-4424-a4f1-2800fa91a0b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.158470] env[62109]: INFO nova.compute.manager [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Took 42.90 seconds to build instance. [ 974.166758] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 974.166758] env[62109]: value = "task-1116896" [ 974.166758] env[62109]: _type = "Task" [ 974.166758] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.167045] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116895, 'name': Rename_Task, 'duration_secs': 0.228881} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.171860] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 974.175691] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f259232a-9657-45c6-949b-4a624ebc5d2c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.185488] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116896, 'name': CreateVM_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.189382] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 974.189382] env[62109]: value = "task-1116897" [ 974.189382] env[62109]: _type = "Task" [ 974.189382] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.198178] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116897, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.216875] env[62109]: DEBUG nova.network.neutron [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance_info_cache with network_info: [{"id": "14e9f4a1-8980-4de2-88f7-dd0162687351", "address": "fa:16:3e:3f:3e:8b", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14e9f4a1-89", "ovs_interfaceid": "14e9f4a1-8980-4de2-88f7-dd0162687351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.224983] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52132133-f21e-4477-f298-75523aadd56e, 'name': SearchDatastore_Task, 'duration_secs': 0.011934} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.228542] env[62109]: DEBUG oslo_concurrency.lockutils [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.228909] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 39c17e34-c8c0-4a66-8d22-717efcb984bc/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk. 
{{(pid=62109) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 974.230180] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.230180] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 974.230406] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9295aa4-9a31-4dec-bc9d-5ae6a35bff64 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.232648] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de227eac-ab26-4f79-a992-560e7f1e33d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.241017] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 974.241017] env[62109]: value = "task-1116898" [ 974.241017] env[62109]: _type = "Task" [ 974.241017] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.242991] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f3e328-415b-48ac-9c98-62d9417f10bf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.250956] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 974.251165] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 974.252504] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef204bb0-3c09-4f43-ba28-d21e64150002 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.258623] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c837153e-0359-4b9c-9a79-f4b9632ae6d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.262656] env[62109]: DEBUG nova.compute.manager [req-67bbe779-0a27-47bd-981d-0e1d2f0b5f21 req-78a9877f-5807-40dd-b3f4-b178c8b43a5f service nova] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Received event network-changed-e5781b49-4005-4203-8390-dc6af21b6eda {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 974.262847] env[62109]: DEBUG nova.compute.manager [req-67bbe779-0a27-47bd-981d-0e1d2f0b5f21 req-78a9877f-5807-40dd-b3f4-b178c8b43a5f service nova] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Refreshing instance network info cache due to event network-changed-e5781b49-4005-4203-8390-dc6af21b6eda. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 974.263071] env[62109]: DEBUG oslo_concurrency.lockutils [req-67bbe779-0a27-47bd-981d-0e1d2f0b5f21 req-78a9877f-5807-40dd-b3f4-b178c8b43a5f service nova] Acquiring lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.263220] env[62109]: DEBUG oslo_concurrency.lockutils [req-67bbe779-0a27-47bd-981d-0e1d2f0b5f21 req-78a9877f-5807-40dd-b3f4-b178c8b43a5f service nova] Acquired lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.263799] env[62109]: DEBUG nova.network.neutron [req-67bbe779-0a27-47bd-981d-0e1d2f0b5f21 req-78a9877f-5807-40dd-b3f4-b178c8b43a5f service nova] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Refreshing network info cache for port e5781b49-4005-4203-8390-dc6af21b6eda {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 974.268547] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 974.268547] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52588e91-9ec9-417c-8d59-ad6618266051" [ 974.268547] env[62109]: _type = "Task" [ 974.268547] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.268990] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116898, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.300105] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df9fa25-3a30-4d27-ae7c-fe7f569ba91f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.305302] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52588e91-9ec9-417c-8d59-ad6618266051, 'name': SearchDatastore_Task, 'duration_secs': 0.015432} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.306383] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bca2a9e-7c4f-4720-a8a9-f628949000c8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.311910] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22500bce-1632-4e8e-b758-241beb9b044c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.316859] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 974.316859] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a7e5f3-70ec-5dc8-50b5-3621e2547162" [ 974.316859] env[62109]: _type = "Task" [ 974.316859] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.327545] env[62109]: DEBUG nova.compute.provider_tree [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 974.334334] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a7e5f3-70ec-5dc8-50b5-3621e2547162, 'name': SearchDatastore_Task, 'duration_secs': 0.009574} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.334568] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.334846] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 0f7445fa-c48e-4e79-a01a-1f8f70072de4/0f7445fa-c48e-4e79-a01a-1f8f70072de4.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 974.335548] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b656892-fb0d-46d8-b116-13e94fbb248a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.343169] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 974.343169] env[62109]: value = "task-1116899" [ 974.343169] env[62109]: _type = "Task" [ 974.343169] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.350863] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116899, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.609824] env[62109]: DEBUG oslo_vmware.api [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116894, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.588274} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.610269] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.610331] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 974.610564] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 974.610817] env[62109]: INFO nova.compute.manager [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Took 1.65 seconds to destroy the instance on the hypervisor. [ 974.611139] env[62109]: DEBUG oslo.service.loopingcall [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 974.611359] env[62109]: DEBUG nova.compute.manager [-] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 974.611484] env[62109]: DEBUG nova.network.neutron [-] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 974.664279] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64452616-d475-4ac1-8e90-681163a08e55 tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.420s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.671078] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.671486] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.672227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.672227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.672227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.678510] env[62109]: INFO nova.compute.manager [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Terminating 
instance [ 974.683309] env[62109]: DEBUG nova.compute.manager [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 974.683571] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 974.684810] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfb7887-6b13-4d27-adb2-eb257c1a43ef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.691375] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116896, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.698104] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 974.698455] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2c56e94-fca0-4da3-9e28-389aa713db1c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.703818] env[62109]: DEBUG oslo_vmware.api [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116897, 'name': PowerOnVM_Task, 'duration_secs': 0.481667} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.704561] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 974.704769] env[62109]: INFO nova.compute.manager [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Took 10.47 seconds to spawn the instance on the hypervisor. 
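[editor's illustrative note, not part of the captured log] The PowerOnVM_Task records above, together with the repeated "Task: {...} progress is N%" lines, show nova's VMware driver starting a vCenter task through oslo.vmware and then polling it to completion via wait_for_task/_poll_task. A minimal sketch of that pattern is given below; it assumes an already-created oslo_vmware.api.VMwareAPISession and a VirtualMachine managed-object reference, and the helper name power_on_and_wait is illustrative rather than taken from the nova source referenced in the log.

    # Sketch of the power-on + task-poll pattern seen in the log.
    # Assumptions: "session" is an oslo_vmware.api.VMwareAPISession that is
    # already logged in; "vm_ref" is a VirtualMachine managed-object reference
    # previously retrieved via a PropertyCollector query.
    def power_on_and_wait(session, vm_ref):
        # Ask vCenter to start the asynchronous power-on task.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # Block while oslo.vmware polls the task state; the periodic
        # "progress is N%" log lines above come from this poll loop.
        return session.wait_for_task(task)
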
[ 974.704951] env[62109]: DEBUG nova.compute.manager [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 974.705843] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3209c2b7-c245-4ff2-b4e5-609cea6645f4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.710012] env[62109]: DEBUG oslo_vmware.api [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 974.710012] env[62109]: value = "task-1116900" [ 974.710012] env[62109]: _type = "Task" [ 974.710012] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.719582] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.726720] env[62109]: DEBUG oslo_vmware.api [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116900, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.753031] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116898, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.855633] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116899, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.868960] env[62109]: DEBUG nova.scheduler.client.report [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 117 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 974.869331] env[62109]: DEBUG nova.compute.provider_tree [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 117 to 118 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 974.869629] env[62109]: DEBUG nova.compute.provider_tree [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 975.072072] env[62109]: DEBUG nova.network.neutron [req-67bbe779-0a27-47bd-981d-0e1d2f0b5f21 req-78a9877f-5807-40dd-b3f4-b178c8b43a5f service nova] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updated VIF entry in instance network info cache for port e5781b49-4005-4203-8390-dc6af21b6eda. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 975.072631] env[62109]: DEBUG nova.network.neutron [req-67bbe779-0a27-47bd-981d-0e1d2f0b5f21 req-78a9877f-5807-40dd-b3f4-b178c8b43a5f service nova] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance_info_cache with network_info: [{"id": "e5781b49-4005-4203-8390-dc6af21b6eda", "address": "fa:16:3e:73:53:db", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5781b49-40", "ovs_interfaceid": "e5781b49-4005-4203-8390-dc6af21b6eda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.137897] env[62109]: DEBUG nova.compute.manager [req-80ec8ba4-4537-4329-862d-9454c5dd0850 req-b809db42-cc32-4436-9c76-0951f23fec30 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Received event network-vif-deleted-7c4891b0-c525-4571-aa3b-47cc9a42d8ac {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 975.137897] env[62109]: INFO nova.compute.manager [req-80ec8ba4-4537-4329-862d-9454c5dd0850 req-b809db42-cc32-4436-9c76-0951f23fec30 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Neutron deleted interface 7c4891b0-c525-4571-aa3b-47cc9a42d8ac; detaching it from the instance and deleting it from the info cache [ 975.137897] env[62109]: DEBUG nova.network.neutron [req-80ec8ba4-4537-4329-862d-9454c5dd0850 req-b809db42-cc32-4436-9c76-0951f23fec30 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.184527] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116896, 'name': CreateVM_Task, 'duration_secs': 0.609452} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.184711] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 975.185419] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.187036] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.187036] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 975.187036] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fd230dc-20c0-4c5c-8a7c-c744f4dd61fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.191760] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 975.191760] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526314c0-ed07-e77d-54d6-67730789bd9d" [ 975.191760] env[62109]: _type = "Task" [ 975.191760] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.199903] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526314c0-ed07-e77d-54d6-67730789bd9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.219076] env[62109]: DEBUG oslo_vmware.api [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116900, 'name': PowerOffVM_Task, 'duration_secs': 0.49508} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.223317] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 975.223543] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 975.225319] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c09c704a-70df-4062-858f-e41dc76478e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.228130] env[62109]: INFO nova.compute.manager [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Took 39.89 seconds to build instance. [ 975.229142] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 975.229535] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ad49a8b-53f3-4323-a411-62da445d1384 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.236983] env[62109]: DEBUG oslo_vmware.api [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 975.236983] env[62109]: value = "task-1116901" [ 975.236983] env[62109]: _type = "Task" [ 975.236983] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.246721] env[62109]: DEBUG oslo_vmware.api [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116901, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.254465] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116898, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572254} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.254758] env[62109]: INFO nova.virt.vmwareapi.ds_util [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 39c17e34-c8c0-4a66-8d22-717efcb984bc/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk. [ 975.255444] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adce593-dc7c-44f2-aace-6741019b749c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.282815] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 39c17e34-c8c0-4a66-8d22-717efcb984bc/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.283304] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70d01bfe-69ed-4eb1-a04c-24d5ad7f6b5c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.305921] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 975.305921] env[62109]: value = "task-1116903" [ 975.305921] env[62109]: _type = "Task" [ 975.305921] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.313811] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116903, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.353737] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116899, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.781489} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.354025] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 0f7445fa-c48e-4e79-a01a-1f8f70072de4/0f7445fa-c48e-4e79-a01a-1f8f70072de4.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 975.354339] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 975.354627] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-849d24ef-3949-4b22-a52a-cb832b04924e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.361676] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 975.361676] env[62109]: value = "task-1116904" [ 975.361676] env[62109]: _type = "Task" [ 975.361676] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.370573] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116904, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.375299] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.433s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.377438] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.426s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.379177] env[62109]: INFO nova.compute.claims [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 975.395819] env[62109]: INFO nova.scheduler.client.report [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Deleted allocations for instance e7e232c4-a2cb-44eb-8ee3-11fc12ee152a [ 975.429569] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 975.429814] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 975.429999] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Deleting the datastore file [datastore1] 50c93e9e-5af6-489e-ac8a-29b8a6777a4e {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 975.430281] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee11dd58-d551-4839-980c-f85ef3706658 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.438175] env[62109]: DEBUG oslo_vmware.api [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for the task: (returnval){ [ 975.438175] env[62109]: value = "task-1116905" [ 975.438175] env[62109]: _type = "Task" [ 975.438175] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.446567] env[62109]: DEBUG oslo_vmware.api [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116905, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.578341] env[62109]: DEBUG oslo_concurrency.lockutils [req-67bbe779-0a27-47bd-981d-0e1d2f0b5f21 req-78a9877f-5807-40dd-b3f4-b178c8b43a5f service nova] Releasing lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.613184] env[62109]: DEBUG nova.network.neutron [-] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.640807] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c06f86d-524c-43a1-8814-735511b325e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.651159] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38fab69-ed57-427b-b777-a4f01221505a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.684761] env[62109]: DEBUG nova.compute.manager [req-80ec8ba4-4537-4329-862d-9454c5dd0850 req-b809db42-cc32-4436-9c76-0951f23fec30 service nova] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Detach interface failed, port_id=7c4891b0-c525-4571-aa3b-47cc9a42d8ac, reason: Instance 8b63f9a1-5639-48b2-b0a9-30380835bef2 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 975.701848] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526314c0-ed07-e77d-54d6-67730789bd9d, 'name': SearchDatastore_Task, 'duration_secs': 0.01971} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.702178] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.702414] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 975.702656] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.702800] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.702979] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 975.703252] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01ca60ba-3be8-4a37-afce-b9d39a09a687 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.713831] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 975.714034] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 975.714750] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21d551f1-aa70-427a-876c-a5b4e90fb19b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.721146] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 975.721146] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52adb6e1-7800-71eb-18f2-4b4f6598e91d" [ 975.721146] env[62109]: _type = "Task" [ 975.721146] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.729315] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52adb6e1-7800-71eb-18f2-4b4f6598e91d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.729778] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7dfe83ab-00e0-4734-bb01-a6063b36c2fa tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "35a13db2-f645-4634-86e0-7e9a6a24fc66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.401s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.747025] env[62109]: DEBUG oslo_vmware.api [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1116901, 'name': PowerOnVM_Task, 'duration_secs': 0.434249} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.747452] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 975.747569] env[62109]: DEBUG nova.compute.manager [None req-2196a176-06af-4735-b370-42acc5507a10 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 975.748379] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e523e4-7854-402f-8b84-2700599c1cc1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.817012] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116903, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.874406] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116904, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.148436} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.874718] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 975.875522] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad7f0cc-1a2d-4afc-9ea3-9b36990544f1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.900611] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 0f7445fa-c48e-4e79-a01a-1f8f70072de4/0f7445fa-c48e-4e79-a01a-1f8f70072de4.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.902928] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b94ff30e-ea7a-4acf-afb4-60eee91b12a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.918051] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3dbff0ae-e05c-43bc-bfb2-9704833cc147 tempest-ServerShowV257Test-1134201852 tempest-ServerShowV257Test-1134201852-project-member] Lock "e7e232c4-a2cb-44eb-8ee3-11fc12ee152a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.877s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.925871] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 975.925871] env[62109]: value = "task-1116906" [ 975.925871] env[62109]: _type = "Task" [ 975.925871] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.935894] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116906, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.949074] env[62109]: DEBUG oslo_vmware.api [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Task: {'id': task-1116905, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.345136} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.949352] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 975.949568] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 975.949759] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 975.949933] env[62109]: INFO nova.compute.manager [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Took 1.27 seconds to destroy the instance on the hypervisor. [ 975.950203] env[62109]: DEBUG oslo.service.loopingcall [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 975.950429] env[62109]: DEBUG nova.compute.manager [-] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 975.950494] env[62109]: DEBUG nova.network.neutron [-] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 976.115747] env[62109]: INFO nova.compute.manager [-] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Took 1.50 seconds to deallocate network for instance. [ 976.232184] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52adb6e1-7800-71eb-18f2-4b4f6598e91d, 'name': SearchDatastore_Task, 'duration_secs': 0.029317} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.233041] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14de89ce-d5b5-4e94-a53f-7d020d0be337 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.239061] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 976.239061] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525404b8-fad1-103f-b264-a10b111e5de2" [ 976.239061] env[62109]: _type = "Task" [ 976.239061] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.248428] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525404b8-fad1-103f-b264-a10b111e5de2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.319186] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116903, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.437828] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116906, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.500027] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d70aef-8279-43c4-98b4-5b59842ae474 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.509894] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8e9f3149-4288-4c5a-8123-6bdbaa8a63cc tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Suspending the VM {{(pid=62109) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 976.510325] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d1bbe504-efc0-4836-b07f-2dca57832eae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.524401] env[62109]: DEBUG oslo_vmware.api [None req-8e9f3149-4288-4c5a-8123-6bdbaa8a63cc tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 976.524401] env[62109]: value = "task-1116907" [ 976.524401] env[62109]: _type = "Task" [ 976.524401] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.540443] env[62109]: DEBUG oslo_vmware.api [None req-8e9f3149-4288-4c5a-8123-6bdbaa8a63cc tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116907, 'name': SuspendVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.622869] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.683108] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6947de4-8051-4ca8-a25c-ab31c0b19d48 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.691642] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ea8453-8387-4d54-a545-c573ec5f5245 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.727915] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94640e3a-fb97-470b-85fb-92722f2e03ad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.737038] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a098cc-0914-484a-816f-5837dc3ccc90 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.750238] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525404b8-fad1-103f-b264-a10b111e5de2, 'name': SearchDatastore_Task, 'duration_secs': 0.033589} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.758745] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.759080] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] dfebeee8-06be-424b-89b0-7c1a3d4703eb/dfebeee8-06be-424b-89b0-7c1a3d4703eb.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 976.759644] env[62109]: DEBUG nova.compute.provider_tree [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.760910] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-64fe4ab2-2360-4347-8972-9f51eb93cdc7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.767897] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 976.767897] env[62109]: value = "task-1116908" [ 976.767897] env[62109]: _type = "Task" [ 976.767897] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.777439] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116908, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.817661] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116903, 'name': ReconfigVM_Task, 'duration_secs': 1.261927} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.818069] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 39c17e34-c8c0-4a66-8d22-717efcb984bc/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 976.819155] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee4a6ac-90fe-43fa-8f97-d9538c7e557e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.850960] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb1bc019-d94d-4eeb-bdac-92b215841ff4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.866999] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 976.866999] env[62109]: value = "task-1116909" [ 976.866999] env[62109]: _type = "Task" [ 976.866999] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.875865] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116909, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.935877] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116906, 'name': ReconfigVM_Task, 'duration_secs': 0.865169} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.937158] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 0f7445fa-c48e-4e79-a01a-1f8f70072de4/0f7445fa-c48e-4e79-a01a-1f8f70072de4.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 976.937158] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa252f06-bdf6-43df-af61-fcba3e9a944d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.943754] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 976.943754] env[62109]: value = "task-1116910" [ 976.943754] env[62109]: _type = "Task" [ 976.943754] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.952595] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116910, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.996176] env[62109]: DEBUG nova.network.neutron [-] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.035593] env[62109]: DEBUG oslo_vmware.api [None req-8e9f3149-4288-4c5a-8123-6bdbaa8a63cc tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116907, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.168383] env[62109]: DEBUG nova.compute.manager [req-2e645e92-32d6-4907-a9ab-b1e7a62464b1 req-a8eb793d-7670-4bf8-9541-1c9c77bbf02b service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Received event network-vif-deleted-a9a12ab6-6933-4a0d-969e-48319fbc9121 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 977.168651] env[62109]: DEBUG nova.compute.manager [req-2e645e92-32d6-4907-a9ab-b1e7a62464b1 req-a8eb793d-7670-4bf8-9541-1c9c77bbf02b service nova] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Received event network-vif-deleted-e7859833-4b92-4db6-9be7-2aa396869294 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 977.264903] env[62109]: DEBUG nova.scheduler.client.report [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 977.282453] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116908, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.378370] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116909, 'name': ReconfigVM_Task, 'duration_secs': 0.225655} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.378705] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 977.379022] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aeb211f3-90ce-4eae-97e1-f931951d5c7d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.387450] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 977.387450] env[62109]: value = "task-1116911" [ 977.387450] env[62109]: _type = "Task" [ 977.387450] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.396505] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116911, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.453552] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116910, 'name': Rename_Task, 'duration_secs': 0.200566} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.453744] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 977.453869] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73d8e135-a500-4c6a-abb4-086d1f614857 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.460168] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 977.460168] env[62109]: value = "task-1116912" [ 977.460168] env[62109]: _type = "Task" [ 977.460168] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.467972] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116912, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.498706] env[62109]: INFO nova.compute.manager [-] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Took 1.55 seconds to deallocate network for instance. [ 977.535959] env[62109]: DEBUG oslo_vmware.api [None req-8e9f3149-4288-4c5a-8123-6bdbaa8a63cc tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116907, 'name': SuspendVM_Task, 'duration_secs': 0.707525} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.536663] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8e9f3149-4288-4c5a-8123-6bdbaa8a63cc tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Suspended the VM {{(pid=62109) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 977.536663] env[62109]: DEBUG nova.compute.manager [None req-8e9f3149-4288-4c5a-8123-6bdbaa8a63cc tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 977.537492] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279e7bea-af89-476a-85eb-2988fae45802 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.774954] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.397s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.775500] env[62109]: DEBUG nova.compute.manager [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 977.778484] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.590s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.780039] env[62109]: INFO nova.compute.claims [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 977.788892] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116908, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.979372} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.788892] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] dfebeee8-06be-424b-89b0-7c1a3d4703eb/dfebeee8-06be-424b-89b0-7c1a3d4703eb.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 977.789150] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 977.789242] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2315941-b92c-43ca-8a1d-a1de34fb8754 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.796325] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 977.796325] env[62109]: value = "task-1116913" [ 977.796325] env[62109]: _type = "Task" [ 977.796325] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.804573] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116913, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.898049] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116911, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.970837] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116912, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.005155] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.289081] env[62109]: DEBUG nova.compute.utils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 978.290702] env[62109]: DEBUG nova.compute.manager [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 978.290959] env[62109]: DEBUG nova.network.neutron [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 978.307670] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116913, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.332020] env[62109]: DEBUG nova.policy [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2988618e18934aa6b85d2ea288917ad3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '275238e3083540aa838de6d5cccf61eb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 978.399640] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116911, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.474028] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116912, 'name': PowerOnVM_Task} progress is 79%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.608732] env[62109]: DEBUG nova.network.neutron [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Successfully created port: c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 978.795077] env[62109]: DEBUG nova.compute.manager [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 978.806933] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116913, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.682478} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.807864] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 978.808895] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7024bb38-54c2-4943-b7d2-1354ac13c301 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.829952] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] dfebeee8-06be-424b-89b0-7c1a3d4703eb/dfebeee8-06be-424b-89b0-7c1a3d4703eb.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 978.832745] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7eee9f77-6d43-4766-908d-3a5033cbfe8d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.853922] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 978.853922] env[62109]: value = "task-1116914" [ 978.853922] env[62109]: _type = "Task" [ 978.853922] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.864097] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116914, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.899492] env[62109]: DEBUG oslo_vmware.api [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116911, 'name': PowerOnVM_Task, 'duration_secs': 1.196355} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.899796] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 978.902620] env[62109]: DEBUG nova.compute.manager [None req-286e2f51-01bb-43f3-b993-0eb7d88acc6f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 978.903241] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6530d7-d66f-4a04-8bf5-839eb5d8f34d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.920950] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "35a13db2-f645-4634-86e0-7e9a6a24fc66" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.921221] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "35a13db2-f645-4634-86e0-7e9a6a24fc66" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.921448] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "35a13db2-f645-4634-86e0-7e9a6a24fc66-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.921611] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "35a13db2-f645-4634-86e0-7e9a6a24fc66-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.921786] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock 
"35a13db2-f645-4634-86e0-7e9a6a24fc66-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.926133] env[62109]: INFO nova.compute.manager [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Terminating instance [ 978.928777] env[62109]: DEBUG nova.compute.manager [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 978.929092] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 978.930335] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e5f376-ad49-417c-983c-f7464d1db1c9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.943187] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 978.944155] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0e47d9c-e6a0-4cfb-b023-2a70237b204f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.976773] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116912, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.010043] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 979.010291] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 979.010480] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleting the datastore file [datastore1] 35a13db2-f645-4634-86e0-7e9a6a24fc66 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 979.010854] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a386e2b-0a7f-4810-ae1e-b7116c852b80 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.018080] env[62109]: DEBUG oslo_vmware.api [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 979.018080] env[62109]: value = "task-1116916" [ 979.018080] env[62109]: _type = "Task" [ 979.018080] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.028263] env[62109]: DEBUG oslo_vmware.api [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116916, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.068863] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8e9f18-7dc3-4e3a-b2a2-f24ad8e954e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.076998] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca23a2d-9fb8-4f1c-a601-144dbc0d292c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.109198] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a48faac-cc2f-427a-a767-83b8b38ec300 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.116499] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22bb69e-c6eb-444a-9742-fdde2930fd38 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.129550] env[62109]: DEBUG nova.compute.provider_tree [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.364212] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116914, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.474188] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116912, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.528526] env[62109]: DEBUG oslo_vmware.api [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116916, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197138} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.528784] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 979.528972] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 979.529174] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 979.529350] env[62109]: INFO nova.compute.manager [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Took 0.60 seconds to destroy the instance on the hypervisor. [ 979.529671] env[62109]: DEBUG oslo.service.loopingcall [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 979.529877] env[62109]: DEBUG nova.compute.manager [-] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 979.529959] env[62109]: DEBUG nova.network.neutron [-] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 979.632652] env[62109]: DEBUG nova.scheduler.client.report [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 979.808086] env[62109]: DEBUG nova.compute.manager [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 979.855573] env[62109]: DEBUG nova.virt.hardware [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 979.855924] env[62109]: DEBUG nova.virt.hardware [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 979.856682] env[62109]: DEBUG nova.virt.hardware [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.856764] env[62109]: DEBUG nova.virt.hardware [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 979.856852] env[62109]: DEBUG nova.virt.hardware [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.857060] env[62109]: DEBUG nova.virt.hardware [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 979.858260] env[62109]: DEBUG nova.virt.hardware [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 979.858260] env[62109]: DEBUG nova.virt.hardware [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 979.858260] env[62109]: DEBUG nova.virt.hardware [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] 
Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 979.858260] env[62109]: DEBUG nova.virt.hardware [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 979.858260] env[62109]: DEBUG nova.virt.hardware [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 979.860612] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cee382b-1c27-4156-b980-2c62ab357138 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.875747] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d86f54d-e3c6-416f-8926-d49aa4d95b5b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.879824] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116914, 'name': ReconfigVM_Task, 'duration_secs': 0.534915} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.880124] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Reconfigured VM instance instance-00000059 to attach disk [datastore1] dfebeee8-06be-424b-89b0-7c1a3d4703eb/dfebeee8-06be-424b-89b0-7c1a3d4703eb.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 979.881116] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0325525-d7e6-4cc7-a918-df2bc4b90bc0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.895187] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 979.895187] env[62109]: value = "task-1116917" [ 979.895187] env[62109]: _type = "Task" [ 979.895187] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.903018] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116917, 'name': Rename_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.949046] env[62109]: DEBUG nova.compute.manager [req-08bad3a8-a589-4891-8268-68b81ecdd489 req-492b5a03-d52f-4e65-bb53-0aab5a8f042e service nova] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Received event network-vif-deleted-5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 979.949046] env[62109]: INFO nova.compute.manager [req-08bad3a8-a589-4891-8268-68b81ecdd489 req-492b5a03-d52f-4e65-bb53-0aab5a8f042e service nova] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Neutron deleted interface 5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3; detaching it from the instance and deleting it from the info cache [ 979.949201] env[62109]: DEBUG nova.network.neutron [req-08bad3a8-a589-4891-8268-68b81ecdd489 req-492b5a03-d52f-4e65-bb53-0aab5a8f042e service nova] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.974077] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116912, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.003036] env[62109]: INFO nova.compute.manager [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Unrescuing [ 980.003375] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.003587] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquired lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.003787] env[62109]: DEBUG nova.network.neutron [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 980.016516] env[62109]: DEBUG nova.compute.manager [req-400aa1ea-a904-4a87-8fda-897cba78a5c5 req-55ee22e5-3044-4127-9d6f-b98ff6e61ae6 service nova] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Received event network-vif-plugged-c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 980.016743] env[62109]: DEBUG oslo_concurrency.lockutils [req-400aa1ea-a904-4a87-8fda-897cba78a5c5 req-55ee22e5-3044-4127-9d6f-b98ff6e61ae6 service nova] Acquiring lock "dce54763-ad3a-40d3-8f72-f0a1aefaf086-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.016993] env[62109]: DEBUG oslo_concurrency.lockutils [req-400aa1ea-a904-4a87-8fda-897cba78a5c5 req-55ee22e5-3044-4127-9d6f-b98ff6e61ae6 service nova] Lock "dce54763-ad3a-40d3-8f72-f0a1aefaf086-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.017502] env[62109]: DEBUG oslo_concurrency.lockutils [req-400aa1ea-a904-4a87-8fda-897cba78a5c5 req-55ee22e5-3044-4127-9d6f-b98ff6e61ae6 service nova] Lock "dce54763-ad3a-40d3-8f72-f0a1aefaf086-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.017715] env[62109]: DEBUG nova.compute.manager [req-400aa1ea-a904-4a87-8fda-897cba78a5c5 req-55ee22e5-3044-4127-9d6f-b98ff6e61ae6 service nova] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] No waiting events found dispatching network-vif-plugged-c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 980.017894] env[62109]: WARNING nova.compute.manager [req-400aa1ea-a904-4a87-8fda-897cba78a5c5 req-55ee22e5-3044-4127-9d6f-b98ff6e61ae6 service nova] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Received unexpected event network-vif-plugged-c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9 for instance with vm_state building and task_state spawning. [ 980.104177] env[62109]: DEBUG nova.network.neutron [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Successfully updated port: c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 980.137515] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.359s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.138065] env[62109]: DEBUG nova.compute.manager [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 980.140761] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.913s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.141204] env[62109]: DEBUG nova.objects.instance [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lazy-loading 'resources' on Instance uuid 0392a352-74e5-4551-9319-eebbc5e20d3b {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 980.372025] env[62109]: DEBUG oslo_concurrency.lockutils [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "5842e112-d3ef-4ce9-91cc-198e68d12422" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.372025] env[62109]: DEBUG oslo_concurrency.lockutils [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.407761] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116917, 'name': Rename_Task, 'duration_secs': 0.164051} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.407761] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 980.407893] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8e7bb1e-5aa2-4396-9810-f7b4a65f4205 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.414464] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 980.414464] env[62109]: value = "task-1116918" [ 980.414464] env[62109]: _type = "Task" [ 980.414464] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.427700] env[62109]: DEBUG nova.network.neutron [-] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.429358] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116918, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.452160] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d699ac8-de62-4d84-9c2a-560b5b282dbc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.463066] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992f994b-435f-4047-99a6-275a1b888c7f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.482245] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116912, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.495157] env[62109]: DEBUG nova.compute.manager [req-08bad3a8-a589-4891-8268-68b81ecdd489 req-492b5a03-d52f-4e65-bb53-0aab5a8f042e service nova] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Detach interface failed, port_id=5aec6f1b-0d09-4a06-b4c6-2bbbf5e36fc3, reason: Instance 35a13db2-f645-4634-86e0-7e9a6a24fc66 could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 980.609105] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "refresh_cache-dce54763-ad3a-40d3-8f72-f0a1aefaf086" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.609105] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "refresh_cache-dce54763-ad3a-40d3-8f72-f0a1aefaf086" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.609105] env[62109]: DEBUG nova.network.neutron [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 980.643446] env[62109]: DEBUG nova.compute.utils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 980.648501] env[62109]: DEBUG nova.compute.manager [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 980.648871] env[62109]: DEBUG nova.network.neutron [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 980.713427] env[62109]: DEBUG nova.policy [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491fd4e791924dafb155dd356bf20aa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6ee24c114bd495e8f29eeda1f6b8bba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 980.746211] env[62109]: DEBUG nova.network.neutron [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updating instance_info_cache with network_info: [{"id": "63549817-3bd1-441c-af9c-739682b35cf2", "address": "fa:16:3e:39:dd:c1", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63549817-3b", "ovs_interfaceid": "63549817-3bd1-441c-af9c-739682b35cf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.875591] env[62109]: DEBUG nova.compute.utils [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 980.928261] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116918, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.930938] env[62109]: INFO nova.compute.manager [-] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Took 1.40 seconds to deallocate network for instance. [ 980.935125] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c724ba7-845d-4a79-8199-dc76bfdfb2fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.948092] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91af949d-2957-4ef4-a7e8-5f537014c13d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.985892] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa6d319-23f0-4f8a-b2d9-bca2824bbd1e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.994385] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116912, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.997633] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0147566-c019-4f8a-b733-9e1951f245e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.011434] env[62109]: DEBUG nova.compute.provider_tree [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.146519] env[62109]: DEBUG nova.network.neutron [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 981.149313] env[62109]: DEBUG nova.compute.manager [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 981.165240] env[62109]: DEBUG nova.network.neutron [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Successfully created port: bef2387d-4fe5-4a29-89fe-d990d0e93b2a {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 981.252831] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Releasing lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.253583] env[62109]: DEBUG nova.objects.instance [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lazy-loading 'flavor' on Instance uuid 39c17e34-c8c0-4a66-8d22-717efcb984bc {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.288353] env[62109]: DEBUG nova.network.neutron [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Updating instance_info_cache with network_info: [{"id": "c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9", "address": "fa:16:3e:de:c4:de", "network": {"id": "9b805542-3ae8-423b-9b1d-70116ea546bb", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1529116057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275238e3083540aa838de6d5cccf61eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2bbdfe0-c3", "ovs_interfaceid": "c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.379093] env[62109]: DEBUG oslo_concurrency.lockutils [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.428761] env[62109]: DEBUG oslo_vmware.api [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116918, 'name': PowerOnVM_Task, 'duration_secs': 0.550513} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.429061] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 981.429270] env[62109]: INFO nova.compute.manager [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Took 9.95 seconds to spawn the instance on the hypervisor. [ 981.429450] env[62109]: DEBUG nova.compute.manager [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 981.430287] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6e59bf-70fe-4926-9c1b-7eb5dc88fe6a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.443232] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.493835] env[62109]: DEBUG oslo_vmware.api [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116912, 'name': PowerOnVM_Task, 'duration_secs': 3.669034} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.494186] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 981.494440] env[62109]: INFO nova.compute.manager [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Took 12.42 seconds to spawn the instance on the hypervisor. 
[ 981.494650] env[62109]: DEBUG nova.compute.manager [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 981.495720] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c842051b-6029-4e91-8954-2e397d10798f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.513879] env[62109]: DEBUG nova.scheduler.client.report [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 981.764277] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ee3117-7c41-4d36-b7bf-d7444ae6a2e7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.786733] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 981.786988] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c09220f-1611-4589-b267-44a8feb797f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.790378] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "refresh_cache-dce54763-ad3a-40d3-8f72-f0a1aefaf086" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.790697] env[62109]: DEBUG nova.compute.manager [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Instance network_info: |[{"id": "c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9", "address": "fa:16:3e:de:c4:de", "network": {"id": "9b805542-3ae8-423b-9b1d-70116ea546bb", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1529116057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275238e3083540aa838de6d5cccf61eb", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2bbdfe0-c3", "ovs_interfaceid": "c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 981.791081] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:c4:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 981.798587] env[62109]: DEBUG oslo.service.loopingcall [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 981.800067] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 981.800404] env[62109]: DEBUG oslo_vmware.api [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 981.800404] env[62109]: value = "task-1116919" [ 981.800404] env[62109]: _type = "Task" [ 981.800404] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.800741] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f58b9aa7-e9cb-4e33-b160-d8356f757aa2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.823092] env[62109]: DEBUG oslo_vmware.api [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116919, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.824305] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 981.824305] env[62109]: value = "task-1116920" [ 981.824305] env[62109]: _type = "Task" [ 981.824305] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.831759] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116920, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.949324] env[62109]: INFO nova.compute.manager [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Took 27.65 seconds to build instance. [ 982.012760] env[62109]: INFO nova.compute.manager [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Took 29.79 seconds to build instance. [ 982.018885] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.878s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.021448] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.399s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.021648] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.024231] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.018s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.024231] env[62109]: DEBUG nova.objects.instance [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lazy-loading 'resources' on Instance uuid 50c93e9e-5af6-489e-ac8a-29b8a6777a4e {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.043804] env[62109]: INFO nova.scheduler.client.report [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleted allocations for instance 0392a352-74e5-4551-9319-eebbc5e20d3b [ 982.046986] env[62109]: INFO nova.scheduler.client.report [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleted allocations for instance 8b63f9a1-5639-48b2-b0a9-30380835bef2 [ 982.060351] env[62109]: DEBUG nova.compute.manager [req-26b4c65e-76c3-4b3c-baa5-5d3efdcbc561 req-243e5be7-e55e-4e3a-98b7-7165a090b3ce service nova] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Received event network-changed-c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9 {{(pid=62109) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 982.060820] env[62109]: DEBUG nova.compute.manager [req-26b4c65e-76c3-4b3c-baa5-5d3efdcbc561 req-243e5be7-e55e-4e3a-98b7-7165a090b3ce service nova] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Refreshing instance network info cache due to event network-changed-c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 982.060820] env[62109]: DEBUG oslo_concurrency.lockutils [req-26b4c65e-76c3-4b3c-baa5-5d3efdcbc561 req-243e5be7-e55e-4e3a-98b7-7165a090b3ce service nova] Acquiring lock "refresh_cache-dce54763-ad3a-40d3-8f72-f0a1aefaf086" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.061129] env[62109]: DEBUG oslo_concurrency.lockutils [req-26b4c65e-76c3-4b3c-baa5-5d3efdcbc561 req-243e5be7-e55e-4e3a-98b7-7165a090b3ce service nova] Acquired lock "refresh_cache-dce54763-ad3a-40d3-8f72-f0a1aefaf086" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.061129] env[62109]: DEBUG nova.network.neutron [req-26b4c65e-76c3-4b3c-baa5-5d3efdcbc561 req-243e5be7-e55e-4e3a-98b7-7165a090b3ce service nova] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Refreshing network info cache for port c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 982.165650] env[62109]: DEBUG nova.compute.manager [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 982.257157] env[62109]: DEBUG nova.virt.hardware [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 982.257444] env[62109]: DEBUG nova.virt.hardware [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 982.257606] env[62109]: DEBUG nova.virt.hardware [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 982.257794] env[62109]: DEBUG nova.virt.hardware [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 982.257946] env[62109]: DEBUG nova.virt.hardware [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 982.258113] env[62109]: DEBUG nova.virt.hardware [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 982.258330] env[62109]: DEBUG nova.virt.hardware [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 982.258494] env[62109]: DEBUG nova.virt.hardware [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 982.258666] env[62109]: DEBUG nova.virt.hardware [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 982.258830] env[62109]: DEBUG nova.virt.hardware [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 982.259046] env[62109]: DEBUG nova.virt.hardware [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 982.259930] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c8e816-edca-4566-8ae8-bd839a481373 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.268551] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6be447-120f-47b4-b418-642f15072f8e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.324188] env[62109]: DEBUG oslo_vmware.api [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 
tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116919, 'name': PowerOffVM_Task, 'duration_secs': 0.240119} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.324481] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 982.329771] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Reconfiguring VM instance instance-0000004f to detach disk 2002 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 982.330451] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9cd6147-e9e2-40c4-b147-f1f42c01c30a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.354051] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116920, 'name': CreateVM_Task, 'duration_secs': 0.428116} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.355360] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 982.355714] env[62109]: DEBUG oslo_vmware.api [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 982.355714] env[62109]: value = "task-1116921" [ 982.355714] env[62109]: _type = "Task" [ 982.355714] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.356363] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.356527] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.356850] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 982.357159] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-673ad762-1b8b-4f2b-aea8-4ecb60f3c2bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.367737] env[62109]: DEBUG oslo_vmware.api [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116921, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.369037] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 982.369037] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f1c64e-aef1-af02-c3e3-7d87889f1280" [ 982.369037] env[62109]: _type = "Task" [ 982.369037] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.376643] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f1c64e-aef1-af02-c3e3-7d87889f1280, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.451546] env[62109]: DEBUG oslo_concurrency.lockutils [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "5842e112-d3ef-4ce9-91cc-198e68d12422" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.452394] env[62109]: DEBUG oslo_concurrency.lockutils [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.452394] env[62109]: INFO nova.compute.manager [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Attaching volume 52569525-8081-4e16-8b50-2801101cb579 to /dev/sdb [ 982.454876] env[62109]: DEBUG oslo_concurrency.lockutils [None req-75297789-11eb-4a76-aeaf-eebf8e121bc7 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.159s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.496065] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d401231-047d-4170-b4c2-ccb467a5ff6b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.503949] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338e1939-bdff-4e1f-9cc5-82384ad14412 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.518095] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2e9a2d7-a5fa-4cea-93a7-5b6689e6054b tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.306s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.518476] env[62109]: DEBUG nova.virt.block_device [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Updating existing volume attachment record: 8637adc0-cc8c-40e6-bbce-a50bf0804bd1 {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 982.560823] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9924b9f8-5bee-4d50-9a5f-4589f1920de2 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "8b63f9a1-5639-48b2-b0a9-30380835bef2" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.607s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.561175] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e058cef2-d83b-4f77-b329-1149399d8f6c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "0392a352-74e5-4551-9319-eebbc5e20d3b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.388s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.824125] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db816d3-4dec-43ec-987d-edf3ba574f72 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.833057] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8820fbe-3809-4cbb-99f4-730e1520e625 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.867647] env[62109]: DEBUG nova.network.neutron [req-26b4c65e-76c3-4b3c-baa5-5d3efdcbc561 req-243e5be7-e55e-4e3a-98b7-7165a090b3ce service nova] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Updated VIF entry in instance network info cache for port c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 982.868095] env[62109]: DEBUG nova.network.neutron [req-26b4c65e-76c3-4b3c-baa5-5d3efdcbc561 req-243e5be7-e55e-4e3a-98b7-7165a090b3ce service nova] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Updating instance_info_cache with network_info: [{"id": "c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9", "address": "fa:16:3e:de:c4:de", "network": {"id": "9b805542-3ae8-423b-9b1d-70116ea546bb", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1529116057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275238e3083540aa838de6d5cccf61eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2bbdfe0-c3", "ovs_interfaceid": "c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.875486] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2624545f-d38f-4ac3-b5ff-6a95bd3b40be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.886957] env[62109]: DEBUG oslo_vmware.api [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 
tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116921, 'name': ReconfigVM_Task, 'duration_secs': 0.293396} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.892651] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Reconfigured VM instance instance-0000004f to detach disk 2002 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 982.892916] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 982.893502] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f1c64e-aef1-af02-c3e3-7d87889f1280, 'name': SearchDatastore_Task, 'duration_secs': 0.017511} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.893801] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29739536-6f00-46cc-a266-cac46f9340a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.897227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.897227] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 982.897227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.897227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.897227] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 
tempest-ImagesTestJSON-1347400972-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.897542] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c59518-28c9-4010-b553-d1422c28a248 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.902742] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c61bb572-4724-4fd5-a6ff-7ecc264cd83b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.917646] env[62109]: DEBUG nova.compute.provider_tree [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.920907] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.921151] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 982.922246] env[62109]: DEBUG oslo_vmware.api [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 982.922246] env[62109]: value = "task-1116924" [ 982.922246] env[62109]: _type = "Task" [ 982.922246] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.923032] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-842e6b7f-da8a-42c0-8af2-73e45af3dc62 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.931731] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 982.931731] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529e719d-7975-05c9-300a-0e06a73ac0ef" [ 982.931731] env[62109]: _type = "Task" [ 982.931731] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.935450] env[62109]: DEBUG oslo_vmware.api [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116924, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.943882] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529e719d-7975-05c9-300a-0e06a73ac0ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.050637] env[62109]: DEBUG nova.network.neutron [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Successfully updated port: bef2387d-4fe5-4a29-89fe-d990d0e93b2a {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 983.058230] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.058528] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.158108] env[62109]: DEBUG nova.compute.manager [req-0b767379-5f3f-463a-8efc-a6761d7f68f8 req-bfa4f5dc-8fee-42b1-9c9f-b579116c50f5 service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Received event network-vif-plugged-bef2387d-4fe5-4a29-89fe-d990d0e93b2a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 983.158108] env[62109]: DEBUG oslo_concurrency.lockutils [req-0b767379-5f3f-463a-8efc-a6761d7f68f8 req-bfa4f5dc-8fee-42b1-9c9f-b579116c50f5 service nova] Acquiring lock "b5410f60-c5fb-4325-8d42-8745c310a6ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.158108] env[62109]: DEBUG oslo_concurrency.lockutils [req-0b767379-5f3f-463a-8efc-a6761d7f68f8 req-bfa4f5dc-8fee-42b1-9c9f-b579116c50f5 service nova] Lock "b5410f60-c5fb-4325-8d42-8745c310a6ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.158108] env[62109]: DEBUG oslo_concurrency.lockutils [req-0b767379-5f3f-463a-8efc-a6761d7f68f8 req-bfa4f5dc-8fee-42b1-9c9f-b579116c50f5 service nova] Lock "b5410f60-c5fb-4325-8d42-8745c310a6ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.158108] env[62109]: DEBUG nova.compute.manager [req-0b767379-5f3f-463a-8efc-a6761d7f68f8 req-bfa4f5dc-8fee-42b1-9c9f-b579116c50f5 service nova] 
[instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] No waiting events found dispatching network-vif-plugged-bef2387d-4fe5-4a29-89fe-d990d0e93b2a {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 983.158423] env[62109]: WARNING nova.compute.manager [req-0b767379-5f3f-463a-8efc-a6761d7f68f8 req-bfa4f5dc-8fee-42b1-9c9f-b579116c50f5 service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Received unexpected event network-vif-plugged-bef2387d-4fe5-4a29-89fe-d990d0e93b2a for instance with vm_state building and task_state spawning. [ 983.380048] env[62109]: DEBUG oslo_concurrency.lockutils [req-26b4c65e-76c3-4b3c-baa5-5d3efdcbc561 req-243e5be7-e55e-4e3a-98b7-7165a090b3ce service nova] Releasing lock "refresh_cache-dce54763-ad3a-40d3-8f72-f0a1aefaf086" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.424409] env[62109]: DEBUG nova.scheduler.client.report [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 983.437815] env[62109]: DEBUG oslo_vmware.api [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116924, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.447178] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529e719d-7975-05c9-300a-0e06a73ac0ef, 'name': SearchDatastore_Task, 'duration_secs': 0.022892} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.447948] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f6841bf-1110-4837-889e-9a25a71a3ade {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.453011] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 983.453011] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524e5f29-39e5-54ef-c25e-0d32a2d25960" [ 983.453011] env[62109]: _type = "Task" [ 983.453011] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.461891] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524e5f29-39e5-54ef-c25e-0d32a2d25960, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.516010] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "bc75898d-7856-4ecb-9640-ec30538fe90f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.516279] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.553590] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.553807] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.553898] env[62109]: DEBUG nova.network.neutron [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 983.561755] env[62109]: DEBUG nova.compute.utils [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 983.833138] env[62109]: DEBUG nova.compute.manager [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Stashing vm_state: active {{(pid=62109) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 983.934059] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.910s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.937023] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.493s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.937023] env[62109]: DEBUG nova.objects.instance [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lazy-loading 'resources' on Instance uuid 35a13db2-f645-4634-86e0-7e9a6a24fc66 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.944466] env[62109]: DEBUG oslo_vmware.api [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116924, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.958560] env[62109]: INFO nova.scheduler.client.report [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Deleted allocations for instance 50c93e9e-5af6-489e-ac8a-29b8a6777a4e [ 983.964438] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524e5f29-39e5-54ef-c25e-0d32a2d25960, 'name': SearchDatastore_Task, 'duration_secs': 0.011184} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.967126] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.967467] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] dce54763-ad3a-40d3-8f72-f0a1aefaf086/dce54763-ad3a-40d3-8f72-f0a1aefaf086.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 983.968186] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f757c6a-5380-406a-b60f-66bb2bca501d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.974588] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 983.974588] env[62109]: value = "task-1116926" [ 983.974588] env[62109]: _type = "Task" [ 983.974588] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.983129] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116926, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.018930] env[62109]: DEBUG nova.compute.manager [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 984.021861] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "f72ca981-1bba-44d9-854f-7677f1a0c764" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.022181] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "f72ca981-1bba-44d9-854f-7677f1a0c764" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.064823] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.103775] env[62109]: DEBUG nova.network.neutron [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 984.320468] env[62109]: DEBUG nova.network.neutron [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updating instance_info_cache with network_info: [{"id": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "address": "fa:16:3e:4d:c9:f2", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef2387d-4f", "ovs_interfaceid": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.351961] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.442660] env[62109]: DEBUG oslo_vmware.api [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1116924, 'name': PowerOnVM_Task, 'duration_secs': 1.055208} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.443597] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 984.443969] env[62109]: DEBUG nova.compute.manager [None req-b15512d0-2b85-424a-9286-2c8c1fdbb5a8 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 984.444883] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76ad98e-6b83-4640-849b-fc7acea58dd9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.476047] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4ae47de1-53bd-43e6-bcd2-68cba09fd75f tempest-ServersTestMultiNic-1003958745 tempest-ServersTestMultiNic-1003958745-project-member] Lock "50c93e9e-5af6-489e-ac8a-29b8a6777a4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.804s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.488172] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116926, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.526765] env[62109]: DEBUG nova.compute.manager [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 984.545609] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.682934] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37cb39c7-213b-4584-a5e2-294c2a64cf6b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.690877] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e2e11b-4b51-4758-9564-004876d72dde {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.721804] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6eb8d9-0b5a-4b8a-a22c-ad75c7091b65 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.729018] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc72c39b-6a04-4300-bb1f-799681908b42 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.741873] env[62109]: DEBUG nova.compute.provider_tree [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 984.824383] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.824383] env[62109]: DEBUG nova.compute.manager [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Instance network_info: |[{"id": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "address": "fa:16:3e:4d:c9:f2", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef2387d-4f", "ovs_interfaceid": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 984.824627] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:c9:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b8137fc-f23d-49b1-b19c-3123a5588f34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bef2387d-4fe5-4a29-89fe-d990d0e93b2a', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 984.832064] env[62109]: DEBUG oslo.service.loopingcall [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.832283] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 984.832502] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fbd42b3-c3e3-46c6-bd34-248e0d9c2fcd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.852737] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.852737] env[62109]: value = "task-1116927" [ 984.852737] env[62109]: _type = "Task" [ 984.852737] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.860212] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116927, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.985755] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116926, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.583909} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.986043] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] dce54763-ad3a-40d3-8f72-f0a1aefaf086/dce54763-ad3a-40d3-8f72-f0a1aefaf086.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 984.986290] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 984.986552] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0415f435-fbd3-4475-9353-2189ca0b1257 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.994681] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 984.994681] env[62109]: value = "task-1116928" [ 984.994681] env[62109]: _type = "Task" [ 984.994681] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.004178] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116928, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.052328] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.128939] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.129286] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.129522] env[62109]: INFO nova.compute.manager [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Attaching volume cb4727ca-8170-4c34-833f-c48c9f402ff9 to /dev/sdb [ 985.171333] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae90157a-d054-4180-b355-1d4578b17e8c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.179657] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db3ca65-3674-4de4-9ca1-5cd45cc65941 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.193408] env[62109]: DEBUG nova.virt.block_device [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Updating existing volume attachment record: e2722066-4339-48d9-9cf2-29311ad5f823 {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 985.198040] env[62109]: DEBUG nova.compute.manager [req-6b7dfd18-debf-490e-8c1d-76fcd61e252e req-ef5d095d-b21b-49a4-a1eb-1908ac932090 service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Received event network-changed-bef2387d-4fe5-4a29-89fe-d990d0e93b2a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 985.198211] env[62109]: DEBUG nova.compute.manager [req-6b7dfd18-debf-490e-8c1d-76fcd61e252e req-ef5d095d-b21b-49a4-a1eb-1908ac932090 service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Refreshing instance network info cache due to event network-changed-bef2387d-4fe5-4a29-89fe-d990d0e93b2a. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 985.198428] env[62109]: DEBUG oslo_concurrency.lockutils [req-6b7dfd18-debf-490e-8c1d-76fcd61e252e req-ef5d095d-b21b-49a4-a1eb-1908ac932090 service nova] Acquiring lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.198575] env[62109]: DEBUG oslo_concurrency.lockutils [req-6b7dfd18-debf-490e-8c1d-76fcd61e252e req-ef5d095d-b21b-49a4-a1eb-1908ac932090 service nova] Acquired lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.198969] env[62109]: DEBUG nova.network.neutron [req-6b7dfd18-debf-490e-8c1d-76fcd61e252e req-ef5d095d-b21b-49a4-a1eb-1908ac932090 service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Refreshing network info cache for port bef2387d-4fe5-4a29-89fe-d990d0e93b2a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 985.265443] env[62109]: ERROR nova.scheduler.client.report [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [req-d7540bad-a6fb-48fb-b3c1-89d017553a82] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d7540bad-a6fb-48fb-b3c1-89d017553a82"}]} [ 985.285180] env[62109]: DEBUG nova.scheduler.client.report [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 985.304650] env[62109]: DEBUG nova.scheduler.client.report [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 985.308021] env[62109]: DEBUG nova.compute.provider_tree [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 985.316827] env[62109]: DEBUG nova.scheduler.client.report [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 985.340023] env[62109]: DEBUG nova.scheduler.client.report [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 985.368636] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116927, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.508229] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116928, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.1604} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.508514] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 985.511346] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1311283-402b-489b-be18-e976efd79806 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.541592] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] dce54763-ad3a-40d3-8f72-f0a1aefaf086/dce54763-ad3a-40d3-8f72-f0a1aefaf086.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 985.544540] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c793f586-cfa9-43db-8f2b-130904592282 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.577138] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 985.577138] env[62109]: value = "task-1116931" [ 985.577138] env[62109]: _type = "Task" [ 985.577138] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.586298] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116931, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.681254] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3a322f-2b80-4c6f-8e9d-8a567e884f2b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.688563] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aada400d-c889-4d43-bd07-b62d297e044d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.721914] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248217b6-d759-4168-a90d-c3c2d85217e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.731462] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5970fda4-5363-4137-8945-e424cb5e1f60 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.746314] env[62109]: DEBUG nova.compute.provider_tree [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 985.866233] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116927, 'name': CreateVM_Task, 'duration_secs': 0.656679} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.866405] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 985.867098] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.867275] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.867597] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 985.867847] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ab5c0e2-d1ee-4232-a954-4e930282b800 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.872650] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 985.872650] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522f9ded-c07a-1926-a5a7-558b8d1b946a" [ 985.872650] env[62109]: _type = "Task" [ 985.872650] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.882029] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522f9ded-c07a-1926-a5a7-558b8d1b946a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.923968] env[62109]: DEBUG nova.network.neutron [req-6b7dfd18-debf-490e-8c1d-76fcd61e252e req-ef5d095d-b21b-49a4-a1eb-1908ac932090 service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updated VIF entry in instance network info cache for port bef2387d-4fe5-4a29-89fe-d990d0e93b2a. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 985.924369] env[62109]: DEBUG nova.network.neutron [req-6b7dfd18-debf-490e-8c1d-76fcd61e252e req-ef5d095d-b21b-49a4-a1eb-1908ac932090 service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updating instance_info_cache with network_info: [{"id": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "address": "fa:16:3e:4d:c9:f2", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef2387d-4f", "ovs_interfaceid": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.087043] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116931, 'name': ReconfigVM_Task, 'duration_secs': 0.397128} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.087344] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Reconfigured VM instance instance-0000005a to attach disk [datastore2] dce54763-ad3a-40d3-8f72-f0a1aefaf086/dce54763-ad3a-40d3-8f72-f0a1aefaf086.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 986.091594] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-afa1b7ea-d7e0-40ca-afce-a276d7125a7c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.099996] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 986.099996] env[62109]: value = "task-1116932" [ 986.099996] env[62109]: _type = "Task" [ 986.099996] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.115056] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116932, 'name': Rename_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.281531] env[62109]: DEBUG nova.scheduler.client.report [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 122 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 986.281827] env[62109]: DEBUG nova.compute.provider_tree [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 122 to 123 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 986.281974] env[62109]: DEBUG nova.compute.provider_tree [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 986.384343] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522f9ded-c07a-1926-a5a7-558b8d1b946a, 'name': SearchDatastore_Task, 'duration_secs': 0.009145} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.384723] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.385013] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 986.385308] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.385499] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.385722] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 986.386033] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3edbf24-9a56-4ff6-a6c0-7db2bdf7b4eb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.397064] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 986.398067] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 986.398067] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8df72723-fd61-4780-9c84-60defc36e00d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.403827] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 986.403827] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52841438-7fab-188b-e8d1-2395b0154aa3" [ 986.403827] env[62109]: _type = "Task" [ 986.403827] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.411601] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52841438-7fab-188b-e8d1-2395b0154aa3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.426630] env[62109]: DEBUG oslo_concurrency.lockutils [req-6b7dfd18-debf-490e-8c1d-76fcd61e252e req-ef5d095d-b21b-49a4-a1eb-1908ac932090 service nova] Releasing lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.611044] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116932, 'name': Rename_Task, 'duration_secs': 0.158326} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.611365] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 986.611728] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a938f116-64e4-4978-85a8-5114d30d3ad3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.617567] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 986.617567] env[62109]: value = "task-1116933" [ 986.617567] env[62109]: _type = "Task" [ 986.617567] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.625550] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116933, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.791113] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.854s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.793388] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.442s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.816606] env[62109]: INFO nova.scheduler.client.report [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleted allocations for instance 35a13db2-f645-4634-86e0-7e9a6a24fc66 [ 986.914855] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52841438-7fab-188b-e8d1-2395b0154aa3, 'name': SearchDatastore_Task, 'duration_secs': 0.028086} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.915651] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eaee5f5a-0b86-45a7-a219-67412a86208f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.920781] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 986.920781] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524eb035-22be-4524-6c06-0507d7045f21" [ 986.920781] env[62109]: _type = "Task" [ 986.920781] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.929898] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524eb035-22be-4524-6c06-0507d7045f21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.075181] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Volume attach. 
Driver type: vmdk {{(pid=62109) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 987.075181] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244509', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'name': 'volume-52569525-8081-4e16-8b50-2801101cb579', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5842e112-d3ef-4ce9-91cc-198e68d12422', 'attached_at': '', 'detached_at': '', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'serial': '52569525-8081-4e16-8b50-2801101cb579'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 987.075505] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01cce6f9-7520-426f-ad2d-af70bfa73328 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.099190] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575febe6-ee3e-40d5-a066-266dfc04090e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.126406] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] volume-52569525-8081-4e16-8b50-2801101cb579/volume-52569525-8081-4e16-8b50-2801101cb579.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 987.129882] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b64b8723-f944-4bc6-bbc3-816575523dc4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.148890] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116933, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.150344] env[62109]: DEBUG oslo_vmware.api [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 987.150344] env[62109]: value = "task-1116934" [ 987.150344] env[62109]: _type = "Task" [ 987.150344] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.158195] env[62109]: DEBUG oslo_vmware.api [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116934, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.228958] env[62109]: DEBUG nova.compute.manager [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Received event network-changed-63549817-3bd1-441c-af9c-739682b35cf2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 987.229256] env[62109]: DEBUG nova.compute.manager [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Refreshing instance network info cache due to event network-changed-63549817-3bd1-441c-af9c-739682b35cf2. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 987.229530] env[62109]: DEBUG oslo_concurrency.lockutils [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] Acquiring lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.229741] env[62109]: DEBUG oslo_concurrency.lockutils [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] Acquired lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.229987] env[62109]: DEBUG nova.network.neutron [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Refreshing network info cache for port 63549817-3bd1-441c-af9c-739682b35cf2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 987.299242] env[62109]: INFO nova.compute.claims [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 987.324021] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f6a75005-3793-4c05-92a8-7df48c21d6db tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "35a13db2-f645-4634-86e0-7e9a6a24fc66" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.402s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.432388] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524eb035-22be-4524-6c06-0507d7045f21, 'name': SearchDatastore_Task, 'duration_secs': 0.023364} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.432678] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.432947] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] b5410f60-c5fb-4325-8d42-8745c310a6ca/b5410f60-c5fb-4325-8d42-8745c310a6ca.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 987.433243] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9171fb72-b4ef-4565-af7c-06c82eeffdde {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.439465] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 987.439465] env[62109]: value = "task-1116936" [ 987.439465] env[62109]: _type = "Task" [ 987.439465] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.447871] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116936, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.639045] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116933, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.661169] env[62109]: DEBUG oslo_vmware.api [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116934, 'name': ReconfigVM_Task, 'duration_secs': 0.36435} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.661587] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Reconfigured VM instance instance-00000051 to attach disk [datastore2] volume-52569525-8081-4e16-8b50-2801101cb579/volume-52569525-8081-4e16-8b50-2801101cb579.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.670839] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee35ea79-2ee7-4e18-be89-251528f4f333 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.693519] env[62109]: DEBUG oslo_vmware.api [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 987.693519] env[62109]: value = "task-1116937" [ 987.693519] env[62109]: _type = "Task" [ 987.693519] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.700315] env[62109]: DEBUG oslo_vmware.api [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116937, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.808278] env[62109]: INFO nova.compute.resource_tracker [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating resource usage from migration 31810d44-a7b5-4c62-8d96-6e06b6ac02b1 [ 987.954690] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116936, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.083372] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edaeb9f3-1d64-4ab3-ba95-c8d31930d671 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.094794] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f97b67-d5ee-4907-8f66-2d993159ce57 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.138031] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4a1db3-73e2-411a-aae3-6a6bc2d56880 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.146959] env[62109]: DEBUG oslo_vmware.api [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116933, 'name': PowerOnVM_Task, 'duration_secs': 1.235544} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.149322] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 988.149732] env[62109]: INFO nova.compute.manager [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Took 8.34 seconds to spawn the instance on the hypervisor. [ 988.150118] env[62109]: DEBUG nova.compute.manager [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 988.152015] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90da778-62eb-4c31-8a86-5c4e31948458 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.155270] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da412465-7082-4f41-8ea4-b48bf41c64c8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.173641] env[62109]: DEBUG nova.compute.provider_tree [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.201592] env[62109]: DEBUG oslo_vmware.api [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116937, 'name': ReconfigVM_Task, 'duration_secs': 0.166874} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.201939] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244509', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'name': 'volume-52569525-8081-4e16-8b50-2801101cb579', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5842e112-d3ef-4ce9-91cc-198e68d12422', 'attached_at': '', 'detached_at': '', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'serial': '52569525-8081-4e16-8b50-2801101cb579'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 988.239846] env[62109]: DEBUG nova.network.neutron [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updated VIF entry in instance network info cache for port 63549817-3bd1-441c-af9c-739682b35cf2. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 988.240292] env[62109]: DEBUG nova.network.neutron [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updating instance_info_cache with network_info: [{"id": "63549817-3bd1-441c-af9c-739682b35cf2", "address": "fa:16:3e:39:dd:c1", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63549817-3b", "ovs_interfaceid": "63549817-3bd1-441c-af9c-739682b35cf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.453626] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116936, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.623468} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.454015] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] b5410f60-c5fb-4325-8d42-8745c310a6ca/b5410f60-c5fb-4325-8d42-8745c310a6ca.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 988.454322] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 988.454652] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-20166e80-586e-4dfb-addd-246c74821515 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.462675] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 988.462675] env[62109]: value = "task-1116938" [ 988.462675] env[62109]: _type = "Task" [ 988.462675] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.474282] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116938, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.682310] env[62109]: DEBUG nova.scheduler.client.report [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 988.687630] env[62109]: INFO nova.compute.manager [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Took 27.75 seconds to build instance. 
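Most of the entries above and below follow the same oslo_vmware pattern: a vSphere method ending in _Task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) returns a task reference immediately, and the session's wait_for_task then polls it, which is what produces the recurring "Waiting for the task", "progress is N%", and "completed successfully" lines from api.py, while oslo_concurrency.lockutils serializes access to the shared image-cache VMDK. Below is a minimal sketch of that invoke-then-poll pattern using oslo.vmware directly rather than Nova's own helpers; the vCenter host, credentials, lock name, disk path, and datacenter reference are illustrative placeholders, not values taken from this log.

    # Minimal sketch of the invoke-then-poll pattern that produces the
    # "Waiting for the task ... progress is N% ... completed successfully"
    # entries in the log. Host, credentials and object paths are placeholders.
    from oslo_concurrency import lockutils
    from oslo_vmware import api as vmware_api


    def extend_root_disk(session, disk_path, datacenter, new_size_kb):
        # Serialize work on the shared VMDK, mirroring the lockutils
        # "Acquiring/Acquired/Releasing lock" entries in the log.
        with lockutils.lock(disk_path):
            disk_mgr = session.vim.service_content.virtualDiskManager
            # The SOAP call returns a Task moref right away ...
            task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                                      disk_mgr, name=disk_path,
                                      datacenter=datacenter,
                                      newCapacityKb=new_size_kb,
                                      eagerZero=False)
            # ... and wait_for_task polls it (api.py _poll_task in the log),
            # raising on task error instead of returning a result.
            return session.wait_for_task(task)


    if __name__ == '__main__':
        session = vmware_api.VMwareAPISession(
            'vc.example.test', 'administrator@vsphere.local', 'secret',
            api_retry_count=10, task_poll_interval=0.5)
        # 'datacenter' would normally be a Datacenter moref looked up via the
        # PropertyCollector; it is left abstract in this sketch.
        # extend_root_disk(session,
        #                  '[datastore2] some-instance/some-instance.vmdk',
        #                  datacenter, 1048576)

The poll interval corresponds to task_poll_interval on the session, which is why progress lines appear at a roughly fixed cadence; on failure wait_for_task raises a translated exception rather than returning, so task errors surface in the compute manager as exceptions instead of further progress entries.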
[ 988.742570] env[62109]: DEBUG oslo_concurrency.lockutils [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] Releasing lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.742867] env[62109]: DEBUG nova.compute.manager [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Received event network-changed-63549817-3bd1-441c-af9c-739682b35cf2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 988.743067] env[62109]: DEBUG nova.compute.manager [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Refreshing instance network info cache due to event network-changed-63549817-3bd1-441c-af9c-739682b35cf2. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 988.743347] env[62109]: DEBUG oslo_concurrency.lockutils [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] Acquiring lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.743515] env[62109]: DEBUG oslo_concurrency.lockutils [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] Acquired lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.743710] env[62109]: DEBUG nova.network.neutron [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Refreshing network info cache for port 63549817-3bd1-441c-af9c-739682b35cf2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 988.959319] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "2fddcd6c-241e-4591-acec-12487909355c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.959675] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "2fddcd6c-241e-4591-acec-12487909355c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.974988] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116938, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100983} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.975347] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 988.976160] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b4cf54-a37e-4747-93d1-3eedcfb8390f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.000063] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] b5410f60-c5fb-4325-8d42-8745c310a6ca/b5410f60-c5fb-4325-8d42-8745c310a6ca.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.000336] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3450ddd5-38c5-4497-979b-1749665e4138 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.021054] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 989.021054] env[62109]: value = "task-1116939" [ 989.021054] env[62109]: _type = "Task" [ 989.021054] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.029018] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116939, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.188818] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.395s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.189066] env[62109]: INFO nova.compute.manager [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Migrating [ 989.195875] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.650s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.197421] env[62109]: INFO nova.compute.claims [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 989.200042] env[62109]: DEBUG oslo_concurrency.lockutils [None req-29b3086a-1ee6-47a7-b67b-db9f483d2406 tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "dce54763-ad3a-40d3-8f72-f0a1aefaf086" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.275s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.243022] env[62109]: DEBUG nova.objects.instance [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lazy-loading 'flavor' on Instance uuid 5842e112-d3ef-4ce9-91cc-198e68d12422 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 989.462947] env[62109]: DEBUG nova.compute.manager [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 989.538162] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116939, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.564490] env[62109]: DEBUG nova.network.neutron [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updated VIF entry in instance network info cache for port 63549817-3bd1-441c-af9c-739682b35cf2. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 989.564996] env[62109]: DEBUG nova.network.neutron [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updating instance_info_cache with network_info: [{"id": "63549817-3bd1-441c-af9c-739682b35cf2", "address": "fa:16:3e:39:dd:c1", "network": {"id": "959f22f0-cafa-4c00-ab98-5063c97d887f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1191105155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd79b6e383494f2bb88bd4a0e388f18d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63549817-3b", "ovs_interfaceid": "63549817-3bd1-441c-af9c-739682b35cf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.710595] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.710807] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.710998] env[62109]: DEBUG nova.network.neutron [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 989.744829] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Volume attach. 
Driver type: vmdk {{(pid=62109) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 989.745082] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244511', 'volume_id': 'cb4727ca-8170-4c34-833f-c48c9f402ff9', 'name': 'volume-cb4727ca-8170-4c34-833f-c48c9f402ff9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '414ac48f-68bc-4d37-98c0-4bcc9f7f37c5', 'attached_at': '', 'detached_at': '', 'volume_id': 'cb4727ca-8170-4c34-833f-c48c9f402ff9', 'serial': 'cb4727ca-8170-4c34-833f-c48c9f402ff9'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 989.745960] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ca8c22-66f5-4af3-bed6-d815c9447756 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.751959] env[62109]: DEBUG oslo_concurrency.lockutils [None req-068ecaa3-dfa2-4e9f-ae78-b1dbdaf03a17 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.300s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.767573] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1604ec-47fe-4d54-8d03-f9e545702704 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.797307] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] volume-cb4727ca-8170-4c34-833f-c48c9f402ff9/volume-cb4727ca-8170-4c34-833f-c48c9f402ff9.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.798242] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc6b0add-6fc6-4e60-a4c9-1b37496ba3ee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.817804] env[62109]: DEBUG oslo_vmware.api [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 989.817804] env[62109]: value = "task-1116940" [ 989.817804] env[62109]: _type = "Task" [ 989.817804] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.826860] env[62109]: DEBUG oslo_vmware.api [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116940, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.988808] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.032186] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116939, 'name': ReconfigVM_Task, 'duration_secs': 0.515128} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.032543] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Reconfigured VM instance instance-0000005b to attach disk [datastore2] b5410f60-c5fb-4325-8d42-8745c310a6ca/b5410f60-c5fb-4325-8d42-8745c310a6ca.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.033368] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c55f5042-4746-413e-9c04-f70dcc1dd746 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.040432] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 990.040432] env[62109]: value = "task-1116941" [ 990.040432] env[62109]: _type = "Task" [ 990.040432] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.048912] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116941, 'name': Rename_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.067875] env[62109]: DEBUG oslo_concurrency.lockutils [req-27a8c208-66da-4d82-98b3-7031f7b8e8c4 req-0a57abf1-c8be-458d-b3f3-c3a16fee1d64 service nova] Releasing lock "refresh_cache-39c17e34-c8c0-4a66-8d22-717efcb984bc" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.164725] env[62109]: INFO nova.compute.manager [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Rebuilding instance [ 990.207701] env[62109]: DEBUG nova.compute.manager [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 990.208669] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea14de8c-7c64-4d61-a2bd-0e9e89d3ee1f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.331122] env[62109]: DEBUG oslo_vmware.api [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.420167] env[62109]: DEBUG nova.compute.manager [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 990.421415] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95516f7-519e-4913-8b47-618ab8353c68 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.475729] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e23dbf5-b283-4256-9ed8-902a5c158416 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.480282] env[62109]: DEBUG nova.network.neutron [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance_info_cache with network_info: [{"id": "e5781b49-4005-4203-8390-dc6af21b6eda", "address": "fa:16:3e:73:53:db", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5781b49-40", "ovs_interfaceid": "e5781b49-4005-4203-8390-dc6af21b6eda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.484902] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba8082d-4dda-483f-9f03-50499862c18e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.516495] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487d2337-b4d2-4508-9d4c-638a2f6418d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.524257] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf03e4d6-dd38-49bd-8c31-dfdf493d253e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.538461] env[62109]: DEBUG nova.compute.provider_tree [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 990.550795] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116941, 'name': Rename_Task, 'duration_secs': 0.189756} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.551069] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 990.551302] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a7367ea-7712-485d-a3d1-9c50a9c84cf8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.556802] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 990.556802] env[62109]: value = "task-1116942" [ 990.556802] env[62109]: _type = "Task" [ 990.556802] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.564117] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116942, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.725186] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 990.725635] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76d96413-4756-4d74-841f-18841b09bd01 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.733830] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 990.733830] env[62109]: value = "task-1116943" [ 990.733830] env[62109]: _type = "Task" [ 990.733830] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.742289] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116943, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.828913] env[62109]: DEBUG oslo_vmware.api [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116940, 'name': ReconfigVM_Task, 'duration_secs': 0.732316} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.829316] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Reconfigured VM instance instance-00000053 to attach disk [datastore1] volume-cb4727ca-8170-4c34-833f-c48c9f402ff9/volume-cb4727ca-8170-4c34-833f-c48c9f402ff9.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.834149] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b5ddc33-6c3b-4326-9acc-5c9617b7da30 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.849923] env[62109]: DEBUG oslo_vmware.api [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 990.849923] env[62109]: value = "task-1116944" [ 990.849923] env[62109]: _type = "Task" [ 990.849923] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.858695] env[62109]: DEBUG oslo_vmware.api [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116944, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.936935] env[62109]: INFO nova.compute.manager [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] instance snapshotting [ 990.941028] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d0587d-9e39-4c64-8c12-ab0fc63c9b1d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.961533] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aab7640-4a84-41e4-965b-f32dd5ca66ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.984711] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.061911] env[62109]: ERROR nova.scheduler.client.report [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [req-41037516-1b65-479b-a56b-ca705d0a33a2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-41037516-1b65-479b-a56b-ca705d0a33a2"}]} [ 991.070168] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116942, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.078174] env[62109]: DEBUG nova.scheduler.client.report [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 991.091771] env[62109]: DEBUG nova.scheduler.client.report [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 991.092055] env[62109]: DEBUG nova.compute.provider_tree [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 991.103087] env[62109]: DEBUG nova.scheduler.client.report [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 991.121119] env[62109]: DEBUG nova.scheduler.client.report [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 991.246785] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116943, 'name': PowerOffVM_Task, 'duration_secs': 0.24072} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.246785] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 991.301966] env[62109]: INFO nova.compute.manager [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Detaching volume 52569525-8081-4e16-8b50-2801101cb579 [ 991.335723] env[62109]: INFO nova.virt.block_device [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Attempting to driver detach volume 52569525-8081-4e16-8b50-2801101cb579 from mountpoint /dev/sdb [ 991.335984] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Volume detach. Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 991.336196] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244509', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'name': 'volume-52569525-8081-4e16-8b50-2801101cb579', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5842e112-d3ef-4ce9-91cc-198e68d12422', 'attached_at': '', 'detached_at': '', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'serial': '52569525-8081-4e16-8b50-2801101cb579'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 991.337104] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a683f70a-2cc3-4cf7-add7-c36258ff4c23 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.365493] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73aa7cf-97fb-42af-9785-5dc5c341d7dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.370269] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f674c0a2-8c73-4c82-b927-28e261771f8c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.380330] env[62109]: DEBUG oslo_vmware.api [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116944, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.382605] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329a6ebe-8e42-4a4f-9217-3e80a8f75671 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.386484] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8af58c3-7eed-438d-ae3b-21e84ff6c57a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.435027] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39f2e67-cbb6-4336-8e41-9f86c973ee33 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.435428] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b762ac4-5208-4d49-a106-b53712def40c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.463136] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb946b9-1292-48da-86a8-f967e0c7399c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.470446] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] The volume has not been displaced from its original location: [datastore2] volume-52569525-8081-4e16-8b50-2801101cb579/volume-52569525-8081-4e16-8b50-2801101cb579.vmdk. No consolidation needed. 
{{(pid=62109) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 991.480296] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Reconfiguring VM instance instance-00000051 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 991.482113] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 991.482499] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d47d7f7-454a-46e9-9b3e-ad9564ebbd38 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.506206] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4fef3e39-0847-4914-907e-4f6bc0aee0d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.526890] env[62109]: DEBUG nova.compute.provider_tree [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 991.531591] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 991.531591] env[62109]: value = "task-1116945" [ 991.531591] env[62109]: _type = "Task" [ 991.531591] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.536229] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 991.536229] env[62109]: value = "task-1116946" [ 991.536229] env[62109]: _type = "Task" [ 991.536229] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.545738] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116945, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.551446] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116946, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.568187] env[62109]: DEBUG oslo_vmware.api [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116942, 'name': PowerOnVM_Task, 'duration_secs': 0.782874} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.568509] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 991.568744] env[62109]: INFO nova.compute.manager [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Took 9.41 seconds to spawn the instance on the hypervisor. [ 991.568958] env[62109]: DEBUG nova.compute.manager [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 991.569990] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1bd13a-beb2-4387-8a7f-c98e19b08186 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.870375] env[62109]: DEBUG oslo_vmware.api [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116944, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.047729] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116945, 'name': ReconfigVM_Task, 'duration_secs': 0.203749} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.050677] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Reconfigured VM instance instance-00000051 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 992.055546] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116946, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.055818] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b647a9d8-6f2f-4fc8-97cb-64860fecbbe4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.066717] env[62109]: DEBUG nova.scheduler.client.report [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 124 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 992.066985] env[62109]: DEBUG nova.compute.provider_tree [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 124 to 125 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 992.067184] env[62109]: DEBUG nova.compute.provider_tree [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 992.075835] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 992.075835] env[62109]: value = "task-1116947" [ 992.075835] env[62109]: _type = "Task" [ 992.075835] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.096573] env[62109]: INFO nova.compute.manager [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Took 30.95 seconds to build instance. [ 992.101662] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.370511] env[62109]: DEBUG oslo_vmware.api [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116944, 'name': ReconfigVM_Task, 'duration_secs': 1.143622} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.370808] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244511', 'volume_id': 'cb4727ca-8170-4c34-833f-c48c9f402ff9', 'name': 'volume-cb4727ca-8170-4c34-833f-c48c9f402ff9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '414ac48f-68bc-4d37-98c0-4bcc9f7f37c5', 'attached_at': '', 'detached_at': '', 'volume_id': 'cb4727ca-8170-4c34-833f-c48c9f402ff9', 'serial': 'cb4727ca-8170-4c34-833f-c48c9f402ff9'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 992.540203] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c03427c-bbeb-46e3-818d-396a1a5c1f42 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.550820] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116946, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.564321] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance 'dfebeee8-06be-424b-89b0-7c1a3d4703eb' progress to 0 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 992.572491] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.377s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.573034] env[62109]: DEBUG nova.compute.manager [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 992.575856] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.523s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.577046] env[62109]: INFO nova.compute.claims [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 992.589468] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116947, 'name': ReconfigVM_Task, 'duration_secs': 0.174607} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.589682] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244509', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'name': 'volume-52569525-8081-4e16-8b50-2801101cb579', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5842e112-d3ef-4ce9-91cc-198e68d12422', 'attached_at': '', 'detached_at': '', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'serial': '52569525-8081-4e16-8b50-2801101cb579'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 992.603553] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b96bab44-13c0-44bd-a6ee-b5faf506588d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "b5410f60-c5fb-4325-8d42-8745c310a6ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.469s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.049951] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116946, 'name': CreateSnapshot_Task, 'duration_secs': 1.371364} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.050262] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 993.051031] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2671692-e374-49aa-84e5-a79911259aab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.070638] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 993.071318] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19528b83-b432-4749-a3fd-0357a62d6ca3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.081416] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 993.081416] env[62109]: value = "task-1116948" [ 993.081416] env[62109]: _type = "Task" [ 993.081416] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.084793] env[62109]: DEBUG nova.compute.utils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 993.089945] env[62109]: DEBUG nova.compute.manager [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 993.090134] env[62109]: DEBUG nova.network.neutron [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 993.099977] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116948, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.143923] env[62109]: DEBUG nova.policy [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5442deec924240babb834fc704d53cd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a363548894df47d5981199004e9884de', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 993.400333] env[62109]: DEBUG nova.compute.manager [req-c6ef741c-ee96-40a6-91d3-6239821cb565 req-e2714fec-e168-46e1-a4a1-22b7942d15b2 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Received event network-changed-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 993.400531] env[62109]: DEBUG nova.compute.manager [req-c6ef741c-ee96-40a6-91d3-6239821cb565 req-e2714fec-e168-46e1-a4a1-22b7942d15b2 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Refreshing instance network info cache due to event network-changed-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 993.400825] env[62109]: DEBUG oslo_concurrency.lockutils [req-c6ef741c-ee96-40a6-91d3-6239821cb565 req-e2714fec-e168-46e1-a4a1-22b7942d15b2 service nova] Acquiring lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.401014] env[62109]: DEBUG oslo_concurrency.lockutils [req-c6ef741c-ee96-40a6-91d3-6239821cb565 req-e2714fec-e168-46e1-a4a1-22b7942d15b2 service nova] Acquired lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.401611] env[62109]: DEBUG nova.network.neutron [req-c6ef741c-ee96-40a6-91d3-6239821cb565 req-e2714fec-e168-46e1-a4a1-22b7942d15b2 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Refreshing network info cache for port f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 993.408632] env[62109]: DEBUG nova.objects.instance [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lazy-loading 'flavor' on Instance uuid 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 993.574024] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 993.574024] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0b4e4c5e-3795-443f-bb04-4f22eca7543f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.587897] env[62109]: DEBUG nova.compute.manager [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 993.590837] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 993.590837] env[62109]: value = "task-1116949" [ 993.590837] env[62109]: _type = "Task" [ 993.590837] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.602936] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116948, 'name': PowerOffVM_Task, 'duration_secs': 0.242304} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.603054] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 993.603245] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance 'dfebeee8-06be-424b-89b0-7c1a3d4703eb' progress to 17 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 993.613774] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116949, 'name': CloneVM_Task} progress is 11%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.640443] env[62109]: DEBUG nova.network.neutron [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Successfully created port: 8b33420f-fd6d-43fc-b4e1-141768c6024b {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 993.649915] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 993.652142] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1bc6e945-e267-4acc-a4b7-61d65b2e7869 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.659431] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 993.659431] env[62109]: value = "task-1116950" [ 993.659431] env[62109]: _type = "Task" [ 993.659431] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.677017] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 993.677587] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Volume detach. 
Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 993.677587] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244509', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'name': 'volume-52569525-8081-4e16-8b50-2801101cb579', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5842e112-d3ef-4ce9-91cc-198e68d12422', 'attached_at': '', 'detached_at': '', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'serial': '52569525-8081-4e16-8b50-2801101cb579'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 993.678315] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236bcd00-08f7-42fd-bcb3-4dc7ab2551ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.699032] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a545a0-d2f6-4fd1-b28c-d072aa400283 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.706473] env[62109]: WARNING nova.virt.vmwareapi.driver [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 993.706778] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 993.707879] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d15a07-ebf6-45f8-ad5c-0be0d63d976c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.715043] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 993.717774] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b219547-2653-4f10-b6af-6db17861248d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.808459] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 993.808783] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 
tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 993.809058] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleting the datastore file [datastore2] 5842e112-d3ef-4ce9-91cc-198e68d12422 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 993.810568] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9dcc3104-5e59-4f56-973e-d6f61de0e4b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.817592] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 993.817592] env[62109]: value = "task-1116952" [ 993.817592] env[62109]: _type = "Task" [ 993.817592] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.830383] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116952, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.914230] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d1bba5bf-ebdc-428b-8a2a-189eeee6545a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.785s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.931817] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e53d0d9-3648-4ec8-aa0e-080c2302e1a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.940420] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e70fe5-2ac1-47f8-b616-cb946c9ed033 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.978440] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136475c0-08a8-4036-9594-2699deedbd38 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.987938] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d92c37-1294-47e6-9b8a-cfcfb7079cfd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.003958] env[62109]: DEBUG nova.compute.provider_tree [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Inventory has not changed in 
ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.109967] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116949, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.115519] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 994.115803] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 994.115920] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 994.116119] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 994.116292] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 994.116497] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 994.116705] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 994.117556] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 
tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 994.118977] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 994.118977] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 994.118977] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 994.127133] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bade6c0-b371-40ef-ac08-2ae534b85458 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.143065] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 994.143065] env[62109]: value = "task-1116953" [ 994.143065] env[62109]: _type = "Task" [ 994.143065] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.158105] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.158385] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.160015] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116953, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.315183] env[62109]: DEBUG nova.network.neutron [req-c6ef741c-ee96-40a6-91d3-6239821cb565 req-e2714fec-e168-46e1-a4a1-22b7942d15b2 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updated VIF entry in instance network info cache for port f8fbaff0-8e7e-4f1c-9709-51d00228bc0d. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 994.316345] env[62109]: DEBUG nova.network.neutron [req-c6ef741c-ee96-40a6-91d3-6239821cb565 req-e2714fec-e168-46e1-a4a1-22b7942d15b2 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updating instance_info_cache with network_info: [{"id": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "address": "fa:16:3e:1b:aa:bb", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fbaff0-8e", "ovs_interfaceid": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.327044] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144791} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.327321] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 994.327570] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 994.327693] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 994.507205] env[62109]: DEBUG nova.scheduler.client.report [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 994.610788] env[62109]: DEBUG nova.compute.manager [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 994.612835] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116949, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.645079] env[62109]: DEBUG nova.virt.hardware [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 994.645360] env[62109]: DEBUG nova.virt.hardware [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 994.645566] env[62109]: DEBUG nova.virt.hardware [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 994.645793] env[62109]: DEBUG nova.virt.hardware [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 994.646034] env[62109]: DEBUG nova.virt.hardware [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 994.646102] env[62109]: DEBUG nova.virt.hardware [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 994.646303] env[62109]: DEBUG nova.virt.hardware [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 994.646475] env[62109]: DEBUG nova.virt.hardware [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 994.646656] env[62109]: 
DEBUG nova.virt.hardware [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 994.646922] env[62109]: DEBUG nova.virt.hardware [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 994.647038] env[62109]: DEBUG nova.virt.hardware [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 994.647989] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e871b130-59e3-48ef-8c77-cfb8564dd399 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.661856] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0e6303-7302-46ad-ba16-494b7d72650c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.666778] env[62109]: INFO nova.compute.manager [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Detaching volume cb4727ca-8170-4c34-833f-c48c9f402ff9 [ 994.668622] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116953, 'name': ReconfigVM_Task, 'duration_secs': 0.343695} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.669134] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance 'dfebeee8-06be-424b-89b0-7c1a3d4703eb' progress to 33 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 994.728920] env[62109]: INFO nova.virt.block_device [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Attempting to driver detach volume cb4727ca-8170-4c34-833f-c48c9f402ff9 from mountpoint /dev/sdb [ 994.728920] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Volume detach. 
Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 994.728920] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244511', 'volume_id': 'cb4727ca-8170-4c34-833f-c48c9f402ff9', 'name': 'volume-cb4727ca-8170-4c34-833f-c48c9f402ff9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '414ac48f-68bc-4d37-98c0-4bcc9f7f37c5', 'attached_at': '', 'detached_at': '', 'volume_id': 'cb4727ca-8170-4c34-833f-c48c9f402ff9', 'serial': 'cb4727ca-8170-4c34-833f-c48c9f402ff9'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 994.730083] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf50a4c2-6e8e-42fb-9215-aef6ae087f5d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.755794] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d50e51a-39a3-4306-acd8-bda3514c9141 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.763423] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c87619-860e-4927-8fb8-04d6a9060790 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.784266] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bc2e8e-45b4-4bc1-8f58-6cb48950216d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.800148] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] The volume has not been displaced from its original location: [datastore1] volume-cb4727ca-8170-4c34-833f-c48c9f402ff9/volume-cb4727ca-8170-4c34-833f-c48c9f402ff9.vmdk. No consolidation needed. 
{{(pid=62109) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 994.805437] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Reconfiguring VM instance instance-00000053 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 994.805776] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8df739f-ac60-4507-9aa9-f705573d28c9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.820357] env[62109]: DEBUG oslo_concurrency.lockutils [req-c6ef741c-ee96-40a6-91d3-6239821cb565 req-e2714fec-e168-46e1-a4a1-22b7942d15b2 service nova] Releasing lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.828049] env[62109]: DEBUG oslo_vmware.api [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 994.828049] env[62109]: value = "task-1116954" [ 994.828049] env[62109]: _type = "Task" [ 994.828049] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.836263] env[62109]: INFO nova.virt.block_device [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Booting with volume 52569525-8081-4e16-8b50-2801101cb579 at /dev/sdb [ 994.837918] env[62109]: DEBUG oslo_vmware.api [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116954, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.869122] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-349f92f1-7534-4546-a816-90a0af395434 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.877934] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa28b6a0-c142-44c2-9e7a-b3cc2521097f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.909939] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f3561141-c7de-4529-9c67-d45049b614d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.917824] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1674fc3f-ebe5-4148-a8c3-2b13059393fb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.949282] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f433ab-2598-4a59-97ff-0cd7d7f485c5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.955620] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb64dc0f-c3ba-4403-a802-663f02af8bb9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.968587] env[62109]: DEBUG nova.virt.block_device [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Updating existing volume attachment record: bcd2b80b-ce7e-4946-818b-f0cacd854e99 {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 995.012782] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.437s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.013348] env[62109]: DEBUG nova.compute.manager [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 995.015937] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.027s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.017511] env[62109]: INFO nova.compute.claims [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 995.107761] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116949, 'name': CloneVM_Task, 'duration_secs': 1.08853} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.108133] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Created linked-clone VM from snapshot [ 995.109030] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3826e29-69d1-48fd-bc67-6dac277add87 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.116421] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Uploading image 2b2d2c7f-5de7-4d21-9323-c0da47d0ff76 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 995.127415] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 995.127707] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6ff130ae-2bc2-4a48-8a6e-8e2bea42f2a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.134978] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 995.134978] env[62109]: value = "task-1116955" [ 995.134978] env[62109]: _type = "Task" [ 995.134978] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.143178] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116955, 'name': Destroy_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.185678] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 995.186077] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 995.186289] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 995.186709] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 995.186934] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 995.187346] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 995.187594] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 995.187773] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 995.187952] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 
tempest-ServerDiskConfigTestJSON-842321983-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 995.188401] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 995.188614] env[62109]: DEBUG nova.virt.hardware [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 995.194307] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Reconfiguring VM instance instance-00000059 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 995.194950] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf027caf-b09b-41c2-a2cd-3d9098047184 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.210043] env[62109]: DEBUG nova.compute.manager [req-6763cfcd-e3d9-4b01-952b-895c07fd26f7 req-d6842632-f782-4754-833d-d16efbce07f2 service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Received event network-vif-plugged-8b33420f-fd6d-43fc-b4e1-141768c6024b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 995.210278] env[62109]: DEBUG oslo_concurrency.lockutils [req-6763cfcd-e3d9-4b01-952b-895c07fd26f7 req-d6842632-f782-4754-833d-d16efbce07f2 service nova] Acquiring lock "bc75898d-7856-4ecb-9640-ec30538fe90f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.210487] env[62109]: DEBUG oslo_concurrency.lockutils [req-6763cfcd-e3d9-4b01-952b-895c07fd26f7 req-d6842632-f782-4754-833d-d16efbce07f2 service nova] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.210656] env[62109]: DEBUG oslo_concurrency.lockutils [req-6763cfcd-e3d9-4b01-952b-895c07fd26f7 req-d6842632-f782-4754-833d-d16efbce07f2 service nova] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.210839] env[62109]: DEBUG nova.compute.manager [req-6763cfcd-e3d9-4b01-952b-895c07fd26f7 req-d6842632-f782-4754-833d-d16efbce07f2 service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] No waiting events found dispatching network-vif-plugged-8b33420f-fd6d-43fc-b4e1-141768c6024b {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 995.211099] env[62109]: WARNING 
nova.compute.manager [req-6763cfcd-e3d9-4b01-952b-895c07fd26f7 req-d6842632-f782-4754-833d-d16efbce07f2 service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Received unexpected event network-vif-plugged-8b33420f-fd6d-43fc-b4e1-141768c6024b for instance with vm_state building and task_state spawning. [ 995.216450] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 995.216450] env[62109]: value = "task-1116956" [ 995.216450] env[62109]: _type = "Task" [ 995.216450] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.225069] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116956, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.336933] env[62109]: DEBUG oslo_vmware.api [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116954, 'name': ReconfigVM_Task, 'duration_secs': 0.480141} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.337247] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Reconfigured VM instance instance-00000053 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 995.342187] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-139933ea-3152-4154-a82d-c254595ddcc4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.358383] env[62109]: DEBUG oslo_vmware.api [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 995.358383] env[62109]: value = "task-1116957" [ 995.358383] env[62109]: _type = "Task" [ 995.358383] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.366490] env[62109]: DEBUG oslo_vmware.api [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116957, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.371530] env[62109]: DEBUG nova.network.neutron [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Successfully updated port: 8b33420f-fd6d-43fc-b4e1-141768c6024b {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 995.395207] env[62109]: DEBUG nova.compute.manager [req-3ee5c9b8-2101-4843-b5eb-373111e86488 req-55a60ddd-0085-4de4-b8b0-a21022742fde service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Received event network-changed-8b33420f-fd6d-43fc-b4e1-141768c6024b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 995.395899] env[62109]: DEBUG nova.compute.manager [req-3ee5c9b8-2101-4843-b5eb-373111e86488 req-55a60ddd-0085-4de4-b8b0-a21022742fde service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Refreshing instance network info cache due to event network-changed-8b33420f-fd6d-43fc-b4e1-141768c6024b. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 995.395899] env[62109]: DEBUG oslo_concurrency.lockutils [req-3ee5c9b8-2101-4843-b5eb-373111e86488 req-55a60ddd-0085-4de4-b8b0-a21022742fde service nova] Acquiring lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.395899] env[62109]: DEBUG oslo_concurrency.lockutils [req-3ee5c9b8-2101-4843-b5eb-373111e86488 req-55a60ddd-0085-4de4-b8b0-a21022742fde service nova] Acquired lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.396341] env[62109]: DEBUG nova.network.neutron [req-3ee5c9b8-2101-4843-b5eb-373111e86488 req-55a60ddd-0085-4de4-b8b0-a21022742fde service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Refreshing network info cache for port 8b33420f-fd6d-43fc-b4e1-141768c6024b {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 995.433385] env[62109]: DEBUG nova.compute.manager [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Received event network-changed-bef2387d-4fe5-4a29-89fe-d990d0e93b2a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 995.433592] env[62109]: DEBUG nova.compute.manager [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Refreshing instance network info cache due to event network-changed-bef2387d-4fe5-4a29-89fe-d990d0e93b2a. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 995.433812] env[62109]: DEBUG oslo_concurrency.lockutils [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] Acquiring lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.433994] env[62109]: DEBUG oslo_concurrency.lockutils [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] Acquired lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.434140] env[62109]: DEBUG nova.network.neutron [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Refreshing network info cache for port bef2387d-4fe5-4a29-89fe-d990d0e93b2a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 995.524553] env[62109]: DEBUG nova.compute.utils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 995.526693] env[62109]: DEBUG nova.compute.manager [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 995.526884] env[62109]: DEBUG nova.network.neutron [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 995.604258] env[62109]: DEBUG nova.policy [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd20b2aa2aa4419597ddca6f95ced41f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '093c284d31de414cb583d501864456c8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 995.649218] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116955, 'name': Destroy_Task} progress is 33%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.731311] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116956, 'name': ReconfigVM_Task, 'duration_secs': 0.228279} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.731311] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Reconfigured VM instance instance-00000059 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 995.731311] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee111f6c-81b5-48d3-ba91-a0c60338188f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.758329] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] dfebeee8-06be-424b-89b0-7c1a3d4703eb/dfebeee8-06be-424b-89b0-7c1a3d4703eb.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.758329] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fea371c-0ecd-4bf2-bd9a-078c0b9d9c16 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.776743] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 995.776743] env[62109]: value = "task-1116958" [ 995.776743] env[62109]: _type = "Task" [ 995.776743] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.785791] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116958, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.868909] env[62109]: DEBUG oslo_vmware.api [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116957, 'name': ReconfigVM_Task, 'duration_secs': 0.167369} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.869242] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244511', 'volume_id': 'cb4727ca-8170-4c34-833f-c48c9f402ff9', 'name': 'volume-cb4727ca-8170-4c34-833f-c48c9f402ff9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '414ac48f-68bc-4d37-98c0-4bcc9f7f37c5', 'attached_at': '', 'detached_at': '', 'volume_id': 'cb4727ca-8170-4c34-833f-c48c9f402ff9', 'serial': 'cb4727ca-8170-4c34-833f-c48c9f402ff9'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 995.874413] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.936456] env[62109]: DEBUG nova.network.neutron [req-3ee5c9b8-2101-4843-b5eb-373111e86488 req-55a60ddd-0085-4de4-b8b0-a21022742fde service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 996.036387] env[62109]: DEBUG nova.compute.manager [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 996.054940] env[62109]: DEBUG nova.network.neutron [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Successfully created port: 86b53aab-cc69-467f-be7d-15ca2b456351 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 996.074482] env[62109]: DEBUG nova.network.neutron [req-3ee5c9b8-2101-4843-b5eb-373111e86488 req-55a60ddd-0085-4de4-b8b0-a21022742fde service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.147295] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116955, 'name': Destroy_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.195983] env[62109]: DEBUG nova.network.neutron [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updated VIF entry in instance network info cache for port bef2387d-4fe5-4a29-89fe-d990d0e93b2a. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 996.196365] env[62109]: DEBUG nova.network.neutron [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updating instance_info_cache with network_info: [{"id": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "address": "fa:16:3e:4d:c9:f2", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef2387d-4f", "ovs_interfaceid": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.283132] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d3861d-5822-4471-9180-5aa2711e9208 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.290423] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116958, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.294823] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9856c4db-0e3a-43ce-9a18-a47a7831530c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.324513] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a726dd94-e8a9-47bb-923b-25a2a0adedbf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.331779] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689b85b0-395f-4dd7-8465-42df632dff64 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.344801] env[62109]: DEBUG nova.compute.provider_tree [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 996.411518] env[62109]: DEBUG nova.objects.instance [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lazy-loading 'flavor' on Instance uuid 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.576883] env[62109]: DEBUG oslo_concurrency.lockutils [req-3ee5c9b8-2101-4843-b5eb-373111e86488 req-55a60ddd-0085-4de4-b8b0-a21022742fde service nova] Releasing lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.577298] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.577862] env[62109]: DEBUG nova.network.neutron [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 996.646050] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116955, 'name': Destroy_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.699426] env[62109]: DEBUG oslo_concurrency.lockutils [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] Releasing lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.700070] env[62109]: DEBUG nova.compute.manager [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Received event network-changed-bef2387d-4fe5-4a29-89fe-d990d0e93b2a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 996.700315] env[62109]: DEBUG nova.compute.manager [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Refreshing instance network info cache due to event network-changed-bef2387d-4fe5-4a29-89fe-d990d0e93b2a. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 996.700563] env[62109]: DEBUG oslo_concurrency.lockutils [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] Acquiring lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.700757] env[62109]: DEBUG oslo_concurrency.lockutils [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] Acquired lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.700976] env[62109]: DEBUG nova.network.neutron [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Refreshing network info cache for port bef2387d-4fe5-4a29-89fe-d990d0e93b2a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 996.788523] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116958, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.847545] env[62109]: DEBUG nova.scheduler.client.report [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 997.049557] env[62109]: DEBUG nova.compute.manager [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 997.080685] env[62109]: DEBUG nova.virt.hardware [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 997.080685] env[62109]: DEBUG nova.virt.hardware [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 997.080685] env[62109]: DEBUG nova.virt.hardware [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.080685] env[62109]: DEBUG nova.virt.hardware [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 997.080685] env[62109]: DEBUG nova.virt.hardware [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.080685] env[62109]: 
DEBUG nova.virt.hardware [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 997.080685] env[62109]: DEBUG nova.virt.hardware [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 997.080685] env[62109]: DEBUG nova.virt.hardware [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 997.081667] env[62109]: DEBUG nova.virt.hardware [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 997.081667] env[62109]: DEBUG nova.virt.hardware [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 997.081667] env[62109]: DEBUG nova.virt.hardware [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 997.084528] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fd2914-29d4-400f-a01b-a4870c42b14b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.096379] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a0225a-8e8a-4831-8227-fae04e91404b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.103070] env[62109]: DEBUG nova.virt.hardware [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 997.103375] env[62109]: DEBUG nova.virt.hardware [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 997.103555] env[62109]: DEBUG nova.virt.hardware [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.103793] env[62109]: DEBUG nova.virt.hardware [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 997.103939] env[62109]: DEBUG nova.virt.hardware [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.104096] env[62109]: DEBUG nova.virt.hardware [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 997.104394] env[62109]: DEBUG nova.virt.hardware [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 997.104458] env[62109]: DEBUG nova.virt.hardware [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 997.104677] env[62109]: DEBUG nova.virt.hardware [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 997.104893] env[62109]: DEBUG nova.virt.hardware [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 
997.105128] env[62109]: DEBUG nova.virt.hardware [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 997.105962] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22201c7-09ed-4608-9f56-66bbfcf86346 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.126078] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2ebf17-b11f-4929-a801-1260a9d01c89 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.133623] env[62109]: DEBUG nova.network.neutron [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 997.156837] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:b2:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4e51bf8-f6dd-4890-81ac-da83edf6812c', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 997.164534] env[62109]: DEBUG oslo.service.loopingcall [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 997.168451] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 997.174534] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93254db8-66f6-4cf9-ac13-bb50f225c288 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.189901] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116955, 'name': Destroy_Task, 'duration_secs': 1.564375} completed successfully. 
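Annotation: the "Getting desirable topologies ... / Build topologies for 1 vcpu(s) 1:1:1 / Got 1 possible topologies / Sorted desired topologies" entries above trace how nova.virt.hardware enumerates candidate (sockets, cores, threads) layouts for the flavor's vCPU count and filters them against the flavor/image limits. Below is a minimal, illustrative re-creation of that enumeration only, not Nova's actual implementation; the 65536 default maxima and the m1.nano vcpus=1 case are taken from the log entries above.

# Illustrative sketch only: enumerate (sockets, cores, threads) combinations
# whose product equals the vCPU count and which fit under the given maxima,
# mirroring the "Build topologies ... / Got N possible topologies" entries.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# For the m1.nano flavor in the log (vcpus=1) this yields exactly one
# candidate: VirtCPUTopology(sockets=1, cores=1, threads=1).
print(possible_topologies(1))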
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.190807] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Destroyed the VM [ 997.191097] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 997.191693] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d396ebdc-9c78-49f0-935f-eccc2a67a1ab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.196872] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 997.196872] env[62109]: value = "task-1116959" [ 997.196872] env[62109]: _type = "Task" [ 997.196872] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.201206] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 997.201206] env[62109]: value = "task-1116960" [ 997.201206] env[62109]: _type = "Task" [ 997.201206] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.208946] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116959, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.215557] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116960, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.287973] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116958, 'name': ReconfigVM_Task, 'duration_secs': 1.145924} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.288236] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Reconfigured VM instance instance-00000059 to attach disk [datastore1] dfebeee8-06be-424b-89b0-7c1a3d4703eb/dfebeee8-06be-424b-89b0-7c1a3d4703eb.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 997.288519] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance 'dfebeee8-06be-424b-89b0-7c1a3d4703eb' progress to 50 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 997.353105] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.353806] env[62109]: DEBUG nova.compute.manager [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 997.408299] env[62109]: DEBUG nova.network.neutron [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance_info_cache with network_info: [{"id": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "address": "fa:16:3e:9b:4f:08", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b33420f-fd", "ovs_interfaceid": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.420898] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4abc17ab-95b0-4efa-9011-535d65a3d96a 
tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.262s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.474616] env[62109]: DEBUG nova.compute.manager [req-422cb40b-ced3-4697-82b3-ec583334e4e1 req-0a2cc9f2-c079-47bb-af69-4c82b790229e service nova] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Received event network-vif-plugged-86b53aab-cc69-467f-be7d-15ca2b456351 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 997.474616] env[62109]: DEBUG oslo_concurrency.lockutils [req-422cb40b-ced3-4697-82b3-ec583334e4e1 req-0a2cc9f2-c079-47bb-af69-4c82b790229e service nova] Acquiring lock "f72ca981-1bba-44d9-854f-7677f1a0c764-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.474616] env[62109]: DEBUG oslo_concurrency.lockutils [req-422cb40b-ced3-4697-82b3-ec583334e4e1 req-0a2cc9f2-c079-47bb-af69-4c82b790229e service nova] Lock "f72ca981-1bba-44d9-854f-7677f1a0c764-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.474616] env[62109]: DEBUG oslo_concurrency.lockutils [req-422cb40b-ced3-4697-82b3-ec583334e4e1 req-0a2cc9f2-c079-47bb-af69-4c82b790229e service nova] Lock "f72ca981-1bba-44d9-854f-7677f1a0c764-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.474616] env[62109]: DEBUG nova.compute.manager [req-422cb40b-ced3-4697-82b3-ec583334e4e1 req-0a2cc9f2-c079-47bb-af69-4c82b790229e service nova] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] No waiting events found dispatching network-vif-plugged-86b53aab-cc69-467f-be7d-15ca2b456351 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 997.474616] env[62109]: WARNING nova.compute.manager [req-422cb40b-ced3-4697-82b3-ec583334e4e1 req-0a2cc9f2-c079-47bb-af69-4c82b790229e service nova] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Received unexpected event network-vif-plugged-86b53aab-cc69-467f-be7d-15ca2b456351 for instance with vm_state building and task_state spawning. [ 997.483017] env[62109]: DEBUG nova.network.neutron [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updated VIF entry in instance network info cache for port bef2387d-4fe5-4a29-89fe-d990d0e93b2a. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 997.483431] env[62109]: DEBUG nova.network.neutron [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updating instance_info_cache with network_info: [{"id": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "address": "fa:16:3e:4d:c9:f2", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef2387d-4f", "ovs_interfaceid": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.571857] env[62109]: DEBUG nova.network.neutron [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Successfully updated port: 86b53aab-cc69-467f-be7d-15ca2b456351 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 997.672186] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.672505] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.672737] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.672936] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 
tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.673154] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.675553] env[62109]: INFO nova.compute.manager [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Terminating instance [ 997.677907] env[62109]: DEBUG nova.compute.manager [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 997.678146] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 997.679023] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e81df32-e9a5-4738-9054-8d637209ab36 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.687011] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 997.687447] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa84b555-a98b-4357-8280-a88dac1f42c8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.693528] env[62109]: DEBUG oslo_vmware.api [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 997.693528] env[62109]: value = "task-1116961" [ 997.693528] env[62109]: _type = "Task" [ 997.693528] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.703890] env[62109]: DEBUG oslo_vmware.api [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116961, 'name': PowerOffVM_Task} progress is 0%. 
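Annotation: the interleaved 'Acquiring lock "..." by ... / Lock "..." acquired ... :: waited 0.000s / Lock "..." "released" ... :: held N.NNNs' entries throughout this section are emitted by oslo.concurrency's lockutils wrapper around each guarded section (the per-instance locks, "compute_resources", and the per-instance "-events" locks). A minimal sketch of that pattern follows; only the oslo_concurrency.lockutils calls are real, while the lock names and the work inside the critical sections are placeholders.

# Minimal sketch of the locking pattern behind the "Acquiring lock ... /
# acquired ... waited / released ... held" entries above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources():
    # lockutils' wrapper logs how long this caller waited for the lock and,
    # on return, how long the lock was held.
    pass

def clear_instance_events(instance_uuid):
    # The same mechanism used as a context manager, as for the per-instance
    # "<uuid>-events" locks seen in the log; the body here is a placeholder.
    with lockutils.lock('%s-events' % instance_uuid):
        pass

claim_resources()
clear_instance_events('414ac48f-68bc-4d37-98c0-4bcc9f7f37c5')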
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.710454] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116959, 'name': CreateVM_Task, 'duration_secs': 0.344652} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.710995] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 997.712216] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.712477] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.712876] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 997.715992] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd2206b6-1c41-49d4-9c58-cba0769e9640 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.717943] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116960, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.721055] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 997.721055] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5261b4c0-64c5-cdff-1ba0-e192250ba7c2" [ 997.721055] env[62109]: _type = "Task" [ 997.721055] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.728812] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5261b4c0-64c5-cdff-1ba0-e192250ba7c2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.795887] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12043caa-e391-4375-9f4b-4361cf8021be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.815669] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b04445-fba2-4bce-9f5f-6c988b726ef0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.835331] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance 'dfebeee8-06be-424b-89b0-7c1a3d4703eb' progress to 67 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 997.860366] env[62109]: DEBUG nova.compute.utils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 997.861484] env[62109]: DEBUG nova.compute.manager [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 997.861661] env[62109]: DEBUG nova.network.neutron [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 997.908226] env[62109]: DEBUG nova.policy [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d8b4a13b12d477ebd973d90ec11f62d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f94fd7a82dc0489597534c518365971b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 997.911061] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.911383] env[62109]: DEBUG nova.compute.manager [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Instance network_info: |[{"id": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "address": 
"fa:16:3e:9b:4f:08", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b33420f-fd", "ovs_interfaceid": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 997.912942] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:4f:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb224918-e294-4b76-80f9-2fa0031b7dc2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b33420f-fd6d-43fc-b4e1-141768c6024b', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 997.920636] env[62109]: DEBUG oslo.service.loopingcall [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 997.921012] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 997.921333] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e6675611-a349-4ef0-b04c-92c8f9971ecb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.944961] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 997.944961] env[62109]: value = "task-1116962" [ 997.944961] env[62109]: _type = "Task" [ 997.944961] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.952685] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116962, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.986030] env[62109]: DEBUG oslo_concurrency.lockutils [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] Releasing lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.986438] env[62109]: DEBUG nova.compute.manager [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Received event network-changed-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 997.987033] env[62109]: DEBUG nova.compute.manager [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Refreshing instance network info cache due to event network-changed-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 997.987679] env[62109]: DEBUG oslo_concurrency.lockutils [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] Acquiring lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.987679] env[62109]: DEBUG oslo_concurrency.lockutils [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] Acquired lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.987679] env[62109]: DEBUG nova.network.neutron [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Refreshing network info cache for port f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 998.074625] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "refresh_cache-f72ca981-1bba-44d9-854f-7677f1a0c764" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.074780] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired lock "refresh_cache-f72ca981-1bba-44d9-854f-7677f1a0c764" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.074916] env[62109]: DEBUG nova.network.neutron [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 998.203405] env[62109]: DEBUG oslo_vmware.api [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116961, 'name': 
PowerOffVM_Task, 'duration_secs': 0.188768} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.203702] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 998.203880] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 998.204221] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ddc37b29-a83a-4a23-a4b4-4e22718b5768 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.214063] env[62109]: DEBUG oslo_vmware.api [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116960, 'name': RemoveSnapshot_Task, 'duration_secs': 0.735369} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.214716] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 998.231031] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5261b4c0-64c5-cdff-1ba0-e192250ba7c2, 'name': SearchDatastore_Task, 'duration_secs': 0.009586} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.231339] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.231573] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 998.232237] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.232237] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.232237] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 998.232494] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b987efd1-a3c8-4ea5-95e4-0deea84c37a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.243040] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 998.243040] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 998.243040] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e3ac24d-6dae-4147-acfe-a3b3daa66f6b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.249022] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 998.249022] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b6eb14-d3f9-df99-0b01-53db7a3d2cae" [ 998.249022] env[62109]: _type = "Task" [ 998.249022] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.256843] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b6eb14-d3f9-df99-0b01-53db7a3d2cae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.300136] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 998.300136] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 998.300136] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Deleting the datastore file [datastore2] 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.300136] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a522b74-8432-4444-97c6-92925568c919 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.307257] env[62109]: DEBUG oslo_vmware.api [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for the task: (returnval){ [ 998.307257] env[62109]: value = "task-1116964" [ 998.307257] env[62109]: _type = "Task" [ 998.307257] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.317066] env[62109]: DEBUG oslo_vmware.api [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116964, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.366219] env[62109]: DEBUG nova.compute.manager [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 998.391825] env[62109]: DEBUG nova.network.neutron [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Port e5781b49-4005-4203-8390-dc6af21b6eda binding to destination host cpu-1 is already ACTIVE {{(pid=62109) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 998.455746] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116962, 'name': CreateVM_Task, 'duration_secs': 0.357878} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.456027] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 998.456654] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.456821] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.457164] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 998.457418] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6eef8fb-6a6c-4533-84cb-f627cd303ec4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.461849] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 998.461849] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526b9348-bae2-3737-4fdb-8120c8c2abf0" [ 998.461849] env[62109]: _type = "Task" [ 998.461849] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.469541] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526b9348-bae2-3737-4fdb-8120c8c2abf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.470387] env[62109]: DEBUG nova.network.neutron [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Successfully created port: 879d4990-6187-4722-b8c5-0c20f9fa59cc {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 998.623880] env[62109]: DEBUG nova.network.neutron [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 998.719315] env[62109]: WARNING nova.compute.manager [None req-a3f4c8f4-f9e8-4165-8627-7557a705cd0c tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Image not found during snapshot: nova.exception.ImageNotFound: Image 2b2d2c7f-5de7-4d21-9323-c0da47d0ff76 could not be found. [ 998.758714] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b6eb14-d3f9-df99-0b01-53db7a3d2cae, 'name': SearchDatastore_Task, 'duration_secs': 0.008552} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.759524] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-483c30e8-fd9d-4b84-8a17-44732d8c2c6b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.765063] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 998.765063] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ec1b9c-bf20-4278-16bc-69d3665d9a77" [ 998.765063] env[62109]: _type = "Task" [ 998.765063] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.775313] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ec1b9c-bf20-4278-16bc-69d3665d9a77, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.816620] env[62109]: DEBUG oslo_vmware.api [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Task: {'id': task-1116964, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141135} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.816878] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 998.817119] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 998.817272] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 998.817497] env[62109]: INFO nova.compute.manager [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Took 1.14 seconds to destroy the instance on the hypervisor. [ 998.817672] env[62109]: DEBUG oslo.service.loopingcall [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 998.817866] env[62109]: DEBUG nova.compute.manager [-] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 998.817962] env[62109]: DEBUG nova.network.neutron [-] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 998.853764] env[62109]: DEBUG nova.network.neutron [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updated VIF entry in instance network info cache for port f8fbaff0-8e7e-4f1c-9709-51d00228bc0d. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 998.854177] env[62109]: DEBUG nova.network.neutron [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updating instance_info_cache with network_info: [{"id": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "address": "fa:16:3e:1b:aa:bb", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fbaff0-8e", "ovs_interfaceid": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.867301] env[62109]: DEBUG nova.network.neutron [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Updating instance_info_cache with network_info: [{"id": "86b53aab-cc69-467f-be7d-15ca2b456351", "address": "fa:16:3e:4c:fd:0c", "network": {"id": "feb45222-861d-4499-8c29-03176662f1ef", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-854878036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "093c284d31de414cb583d501864456c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86b53aab-cc", "ovs_interfaceid": "86b53aab-cc69-467f-be7d-15ca2b456351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.973025] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': 
session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526b9348-bae2-3737-4fdb-8120c8c2abf0, 'name': SearchDatastore_Task, 'duration_secs': 0.008639} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.973366] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.973607] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 998.973826] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.277927] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ec1b9c-bf20-4278-16bc-69d3665d9a77, 'name': SearchDatastore_Task, 'duration_secs': 0.030921} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.278477] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.278809] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 5842e112-d3ef-4ce9-91cc-198e68d12422/5842e112-d3ef-4ce9-91cc-198e68d12422.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 999.279151] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.279656] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 999.279994] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56235b13-a4f6-4ab5-936f-2746f4f9311b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.282707] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bacad9da-6564-4e6f-b15b-d4449b6a8e84 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.291281] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 999.291281] env[62109]: value = "task-1116965" [ 999.291281] env[62109]: _type = "Task" [ 999.291281] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.299956] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116965, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.303738] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 999.303935] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 999.304695] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8120ff12-7ab0-4743-b3bb-802fee6647f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.310104] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 999.310104] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528ad49b-7852-8307-0681-f7462ff6a0af" [ 999.310104] env[62109]: _type = "Task" [ 999.310104] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.318539] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528ad49b-7852-8307-0681-f7462ff6a0af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.357417] env[62109]: DEBUG oslo_concurrency.lockutils [req-39658af5-9a0d-48a5-b3f5-f6fba17652a4 req-d4f77d95-e54c-40ae-9a39-9f193449a5be service nova] Releasing lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.369539] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Releasing lock "refresh_cache-f72ca981-1bba-44d9-854f-7677f1a0c764" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.369946] env[62109]: DEBUG nova.compute.manager [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Instance network_info: |[{"id": "86b53aab-cc69-467f-be7d-15ca2b456351", "address": "fa:16:3e:4c:fd:0c", "network": {"id": "feb45222-861d-4499-8c29-03176662f1ef", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-854878036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "093c284d31de414cb583d501864456c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86b53aab-cc", "ovs_interfaceid": "86b53aab-cc69-467f-be7d-15ca2b456351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 999.370461] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:fd:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4349e30-c086-4c24-9e0e-83996d808a1b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '86b53aab-cc69-467f-be7d-15ca2b456351', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.378861] env[62109]: DEBUG oslo.service.loopingcall [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 999.381102] env[62109]: DEBUG nova.compute.manager [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 999.382460] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 999.382654] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8bfa8e3-f60d-454f-9d5c-5c2ee5575fb6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.420523] env[62109]: DEBUG nova.virt.hardware [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 999.422018] env[62109]: DEBUG nova.virt.hardware [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 999.422018] env[62109]: DEBUG nova.virt.hardware [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 999.422018] env[62109]: DEBUG nova.virt.hardware [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 999.422018] env[62109]: DEBUG nova.virt.hardware [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 999.422018] env[62109]: DEBUG nova.virt.hardware [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 999.422018] env[62109]: DEBUG nova.virt.hardware [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 999.422343] env[62109]: DEBUG nova.virt.hardware [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 999.422343] env[62109]: DEBUG nova.virt.hardware [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 999.422473] env[62109]: DEBUG nova.virt.hardware [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 999.422680] env[62109]: DEBUG nova.virt.hardware [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 999.423207] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.423446] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.424475] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.427391] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70754e87-611b-4f1a-b6fc-821c1330bcfa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.430469] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.430469] 
env[62109]: value = "task-1116966" [ 999.430469] env[62109]: _type = "Task" [ 999.430469] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.440188] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23088cf-17c9-41d8-89d2-d42f6580a1a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.448078] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116966, 'name': CreateVM_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.547246] env[62109]: DEBUG nova.compute.manager [req-9e676f5c-db9e-49d4-b723-612e8ebc36e5 req-6dacf7c4-80a2-481f-8668-d703e7f365a1 service nova] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Received event network-changed-86b53aab-cc69-467f-be7d-15ca2b456351 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 999.547246] env[62109]: DEBUG nova.compute.manager [req-9e676f5c-db9e-49d4-b723-612e8ebc36e5 req-6dacf7c4-80a2-481f-8668-d703e7f365a1 service nova] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Refreshing instance network info cache due to event network-changed-86b53aab-cc69-467f-be7d-15ca2b456351. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 999.547246] env[62109]: DEBUG oslo_concurrency.lockutils [req-9e676f5c-db9e-49d4-b723-612e8ebc36e5 req-6dacf7c4-80a2-481f-8668-d703e7f365a1 service nova] Acquiring lock "refresh_cache-f72ca981-1bba-44d9-854f-7677f1a0c764" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.547246] env[62109]: DEBUG oslo_concurrency.lockutils [req-9e676f5c-db9e-49d4-b723-612e8ebc36e5 req-6dacf7c4-80a2-481f-8668-d703e7f365a1 service nova] Acquired lock "refresh_cache-f72ca981-1bba-44d9-854f-7677f1a0c764" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.547809] env[62109]: DEBUG nova.network.neutron [req-9e676f5c-db9e-49d4-b723-612e8ebc36e5 req-6dacf7c4-80a2-481f-8668-d703e7f365a1 service nova] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Refreshing network info cache for port 86b53aab-cc69-467f-be7d-15ca2b456351 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 999.620563] env[62109]: DEBUG nova.compute.manager [req-711c12d9-e73f-4241-b475-735a656d48af req-f979bd3e-a699-486f-8e49-afcbf2daf9e3 service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Received event network-vif-deleted-f9409e40-51f1-46fc-b21a-1c43e176574f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 999.620845] env[62109]: INFO nova.compute.manager [req-711c12d9-e73f-4241-b475-735a656d48af req-f979bd3e-a699-486f-8e49-afcbf2daf9e3 service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Neutron deleted interface f9409e40-51f1-46fc-b21a-1c43e176574f; detaching it from the instance and deleting it from the info cache [ 999.621053] env[62109]: DEBUG nova.network.neutron [req-711c12d9-e73f-4241-b475-735a656d48af req-f979bd3e-a699-486f-8e49-afcbf2daf9e3 service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.802242] env[62109]: DEBUG oslo_vmware.api [None 
req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116965, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.821055] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528ad49b-7852-8307-0681-f7462ff6a0af, 'name': SearchDatastore_Task, 'duration_secs': 0.017617} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.822046] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d9c8233-7cd2-4a15-986f-122513f9071c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.827253] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 999.827253] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a814e8-ed56-063e-6ceb-ddb9cdc5c7f9" [ 999.827253] env[62109]: _type = "Task" [ 999.827253] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.835639] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a814e8-ed56-063e-6ceb-ddb9cdc5c7f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.946732] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116966, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.034470] env[62109]: DEBUG nova.network.neutron [-] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.093503] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "dce54763-ad3a-40d3-8f72-f0a1aefaf086" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.093503] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "dce54763-ad3a-40d3-8f72-f0a1aefaf086" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.093503] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "dce54763-ad3a-40d3-8f72-f0a1aefaf086-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.093503] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "dce54763-ad3a-40d3-8f72-f0a1aefaf086-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.093503] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "dce54763-ad3a-40d3-8f72-f0a1aefaf086-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.097212] env[62109]: DEBUG nova.network.neutron [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Successfully updated port: 879d4990-6187-4722-b8c5-0c20f9fa59cc {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1000.098624] env[62109]: INFO nova.compute.manager [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Terminating instance [ 1000.101190] env[62109]: DEBUG nova.compute.manager [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1000.101397] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1000.103052] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228856d4-be00-4443-955d-f66e2d028689 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.112109] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1000.112995] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73baff1c-3f64-4248-8337-3d682014100f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.120366] env[62109]: DEBUG oslo_vmware.api [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 1000.120366] env[62109]: value = "task-1116967" [ 1000.120366] env[62109]: _type = "Task" [ 1000.120366] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.125294] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f10d1035-8daa-44bd-9f3f-05f627ffc720 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.134282] env[62109]: DEBUG oslo_vmware.api [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116967, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.140786] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ec0768-bc8b-489b-801f-3a1e5edd3a6d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.178158] env[62109]: DEBUG nova.compute.manager [req-711c12d9-e73f-4241-b475-735a656d48af req-f979bd3e-a699-486f-8e49-afcbf2daf9e3 service nova] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Detach interface failed, port_id=f9409e40-51f1-46fc-b21a-1c43e176574f, reason: Instance 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1000.297913] env[62109]: DEBUG nova.network.neutron [req-9e676f5c-db9e-49d4-b723-612e8ebc36e5 req-6dacf7c4-80a2-481f-8668-d703e7f365a1 service nova] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Updated VIF entry in instance network info cache for port 86b53aab-cc69-467f-be7d-15ca2b456351. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1000.298314] env[62109]: DEBUG nova.network.neutron [req-9e676f5c-db9e-49d4-b723-612e8ebc36e5 req-6dacf7c4-80a2-481f-8668-d703e7f365a1 service nova] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Updating instance_info_cache with network_info: [{"id": "86b53aab-cc69-467f-be7d-15ca2b456351", "address": "fa:16:3e:4c:fd:0c", "network": {"id": "feb45222-861d-4499-8c29-03176662f1ef", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-854878036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "093c284d31de414cb583d501864456c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86b53aab-cc", "ovs_interfaceid": "86b53aab-cc69-467f-be7d-15ca2b456351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.305502] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116965, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515199} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.305761] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 5842e112-d3ef-4ce9-91cc-198e68d12422/5842e112-d3ef-4ce9-91cc-198e68d12422.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1000.306075] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1000.306349] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2e6b1a0f-a01a-4bb8-b642-7daf025798bc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.313834] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1000.313834] env[62109]: value = "task-1116968" [ 1000.313834] env[62109]: _type = "Task" [ 1000.313834] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.321508] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116968, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.336290] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a814e8-ed56-063e-6ceb-ddb9cdc5c7f9, 'name': SearchDatastore_Task, 'duration_secs': 0.016062} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.336545] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.336803] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] bc75898d-7856-4ecb-9640-ec30538fe90f/bc75898d-7856-4ecb-9640-ec30538fe90f.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1000.337084] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1c481df-41e5-4df6-ba35-b683a9ee2305 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.342724] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1000.342724] env[62109]: value = "task-1116969" [ 1000.342724] env[62109]: _type = "Task" [ 1000.342724] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.350558] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116969, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.442801] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116966, 'name': CreateVM_Task, 'duration_secs': 0.653694} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.443060] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1000.443797] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.444025] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.444389] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1000.444685] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cc852e0-4d44-49e8-ad7b-3f5587c44d9a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.449511] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1000.449511] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52279e6a-106f-668d-4227-4039897a4fdb" [ 1000.449511] env[62109]: _type = "Task" [ 1000.449511] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.457853] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52279e6a-106f-668d-4227-4039897a4fdb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.463368] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.463647] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.463758] env[62109]: DEBUG nova.network.neutron [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1000.537007] env[62109]: INFO nova.compute.manager [-] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Took 1.72 seconds to deallocate network for instance. [ 1000.601563] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.601563] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.601563] env[62109]: DEBUG nova.network.neutron [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1000.635610] env[62109]: DEBUG oslo_vmware.api [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116967, 'name': PowerOffVM_Task, 'duration_secs': 0.191419} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.636408] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1000.636803] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1000.637385] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7cc49dd8-c97d-4c61-941a-e7a771576294 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.750958] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1000.751222] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1000.751414] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleting the datastore file [datastore2] dce54763-ad3a-40d3-8f72-f0a1aefaf086 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.751703] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ff8e0f5-e0ee-49b9-b969-39bd6ee7dba9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.758771] env[62109]: DEBUG oslo_vmware.api [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for the task: (returnval){ [ 1000.758771] env[62109]: value = "task-1116971" [ 1000.758771] env[62109]: _type = "Task" [ 1000.758771] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.767159] env[62109]: DEBUG oslo_vmware.api [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116971, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.801185] env[62109]: DEBUG oslo_concurrency.lockutils [req-9e676f5c-db9e-49d4-b723-612e8ebc36e5 req-6dacf7c4-80a2-481f-8668-d703e7f365a1 service nova] Releasing lock "refresh_cache-f72ca981-1bba-44d9-854f-7677f1a0c764" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.823166] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116968, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06935} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.823524] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1000.824345] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e927d7-8721-4b08-ba96-b00d14da61a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.847701] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 5842e112-d3ef-4ce9-91cc-198e68d12422/5842e112-d3ef-4ce9-91cc-198e68d12422.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1000.848466] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a366a40-8cd2-41db-b16e-4a9e3399b66e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.871269] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116969, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515486} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.872470] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] bc75898d-7856-4ecb-9640-ec30538fe90f/bc75898d-7856-4ecb-9640-ec30538fe90f.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1000.872689] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1000.872989] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1000.872989] env[62109]: value = "task-1116972" [ 1000.872989] env[62109]: _type = "Task" [ 1000.872989] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.873212] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfbc12d6-412d-4984-9b65-26b4cb4461f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.883176] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116972, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.884332] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1000.884332] env[62109]: value = "task-1116973" [ 1000.884332] env[62109]: _type = "Task" [ 1000.884332] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.960390] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52279e6a-106f-668d-4227-4039897a4fdb, 'name': SearchDatastore_Task, 'duration_secs': 0.008219} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.960458] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.960711] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1000.960999] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.961165] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.961379] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1000.961624] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8eea976f-8f35-44a2-ad0c-4f08d7ca3440 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.969351] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1000.969536] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1000.970289] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22c1c499-680d-4fe4-b011-3a5733a87483 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.975128] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1000.975128] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529f4ff9-0bde-e7b5-7039-d05395f5777e" [ 1000.975128] env[62109]: _type = "Task" [ 1000.975128] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.982602] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529f4ff9-0bde-e7b5-7039-d05395f5777e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.001043] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.001286] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.044651] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.044910] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.045153] env[62109]: DEBUG nova.objects.instance [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lazy-loading 'resources' on Instance uuid 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.091061] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "interface-6b5a009e-28f5-4be7-8641-089abe359954-fdebe937-16f4-47b6-982f-2a88b25aa054" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.091061] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-6b5a009e-28f5-4be7-8641-089abe359954-fdebe937-16f4-47b6-982f-2a88b25aa054" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.091061] env[62109]: DEBUG nova.objects.instance [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lazy-loading 'flavor' on Instance uuid 6b5a009e-28f5-4be7-8641-089abe359954 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.134987] env[62109]: DEBUG nova.network.neutron [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1001.170218] env[62109]: DEBUG nova.network.neutron [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance_info_cache with network_info: [{"id": "e5781b49-4005-4203-8390-dc6af21b6eda", "address": "fa:16:3e:73:53:db", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5781b49-40", "ovs_interfaceid": "e5781b49-4005-4203-8390-dc6af21b6eda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.268573] env[62109]: DEBUG oslo_vmware.api [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Task: {'id': task-1116971, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227549} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.269417] env[62109]: DEBUG nova.network.neutron [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating instance_info_cache with network_info: [{"id": "879d4990-6187-4722-b8c5-0c20f9fa59cc", "address": "fa:16:3e:60:5c:8d", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap879d4990-61", "ovs_interfaceid": "879d4990-6187-4722-b8c5-0c20f9fa59cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.270585] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.270769] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1001.270947] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1001.271136] env[62109]: INFO nova.compute.manager [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1001.271379] env[62109]: DEBUG oslo.service.loopingcall [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1001.271752] env[62109]: DEBUG nova.compute.manager [-] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1001.271850] env[62109]: DEBUG nova.network.neutron [-] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1001.384370] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116972, 'name': ReconfigVM_Task, 'duration_secs': 0.282196} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.384766] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 5842e112-d3ef-4ce9-91cc-198e68d12422/5842e112-d3ef-4ce9-91cc-198e68d12422.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1001.386242] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'disk_bus': None, 'encryption_format': None, 'device_type': 'disk', 'encryption_options': None, 'encryption_secret_uuid': None, 'device_name': '/dev/sda', 'guest_format': None, 'encrypted': False, 'boot_index': 0, 'image_id': '6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8'}], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244509', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'name': 'volume-52569525-8081-4e16-8b50-2801101cb579', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5842e112-d3ef-4ce9-91cc-198e68d12422', 'attached_at': '', 'detached_at': '', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'serial': '52569525-8081-4e16-8b50-2801101cb579'}, 'device_type': None, 'delete_on_termination': False, 'mount_device': '/dev/sdb', 'attachment_id': 'bcd2b80b-ce7e-4946-818b-f0cacd854e99', 'guest_format': None, 'boot_index': None, 'volume_type': None}], 'swap': None} {{(pid=62109) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1001.386458] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Volume attach. 
Driver type: vmdk {{(pid=62109) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1001.386652] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244509', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'name': 'volume-52569525-8081-4e16-8b50-2801101cb579', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5842e112-d3ef-4ce9-91cc-198e68d12422', 'attached_at': '', 'detached_at': '', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'serial': '52569525-8081-4e16-8b50-2801101cb579'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1001.390263] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c061294-8fc9-42ee-ba5f-0cbd9776d8f1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.397534] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116973, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073529} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.407560] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1001.408483] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f1f1066-5d12-48e8-b9dd-facbc1f58621 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.411181] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a05dfd7-4225-424c-9254-a42a6ec12c4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.443168] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] volume-52569525-8081-4e16-8b50-2801101cb579/volume-52569525-8081-4e16-8b50-2801101cb579.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.451558] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] bc75898d-7856-4ecb-9640-ec30538fe90f/bc75898d-7856-4ecb-9640-ec30538fe90f.vmdk or device None with type sparse 
{{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.451828] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c919fc35-5217-4724-beea-34a1a7b05845 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.468936] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6b18297-6567-41aa-8aa0-2e3a7c0f74b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.491141] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1001.491141] env[62109]: value = "task-1116974" [ 1001.491141] env[62109]: _type = "Task" [ 1001.491141] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.499249] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529f4ff9-0bde-e7b5-7039-d05395f5777e, 'name': SearchDatastore_Task, 'duration_secs': 0.013577} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.499575] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1001.499575] env[62109]: value = "task-1116975" [ 1001.499575] env[62109]: _type = "Task" [ 1001.499575] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.502280] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e136577f-bb8e-4a6c-9c9d-2cb9cef5cdf9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.511290] env[62109]: DEBUG nova.compute.utils [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1001.513015] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116974, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.520634] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1001.520634] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5253a832-276f-44f1-c67b-45ab1912449a" [ 1001.520634] env[62109]: _type = "Task" [ 1001.520634] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.521199] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116975, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.531464] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5253a832-276f-44f1-c67b-45ab1912449a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.674403] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.705899] env[62109]: DEBUG nova.objects.instance [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lazy-loading 'pci_requests' on Instance uuid 6b5a009e-28f5-4be7-8641-089abe359954 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.773656] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.773656] env[62109]: DEBUG nova.compute.manager [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Instance network_info: |[{"id": "879d4990-6187-4722-b8c5-0c20f9fa59cc", "address": "fa:16:3e:60:5c:8d", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap879d4990-61", "ovs_interfaceid": "879d4990-6187-4722-b8c5-0c20f9fa59cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1001.773997] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:5c:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7cd4cea-788c-4e6d-9df8-5d83838e2e2a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '879d4990-6187-4722-b8c5-0c20f9fa59cc', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1001.781888] env[62109]: DEBUG oslo.service.loopingcall [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1001.783020] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1001.783737] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbab508e-2502-4518-8bb6-693265eac02e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.786305] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-894f8e0e-33c0-4459-95cb-3479a43c4235 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.805306] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343afee7-79a6-47e2-8e2e-34bc789f9342 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.809418] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1001.809418] env[62109]: value = "task-1116976" [ 1001.809418] env[62109]: _type = "Task" [ 1001.809418] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.838964] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209e28aa-8977-4e57-8dde-2009593bbc40 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.844272] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116976, 'name': CreateVM_Task} progress is 15%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.848802] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5deb2f-1b72-4570-a6b4-736ccb2c6611 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.862056] env[62109]: DEBUG nova.compute.provider_tree [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.003245] env[62109]: DEBUG nova.network.neutron [-] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.004523] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116974, 'name': ReconfigVM_Task, 'duration_secs': 0.337893} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.008683] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Reconfigured VM instance instance-00000051 to attach disk [datastore2] volume-52569525-8081-4e16-8b50-2801101cb579/volume-52569525-8081-4e16-8b50-2801101cb579.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.014243] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.014812] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d020c65e-4972-4957-8b68-5437a52461f1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.033821] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116975, 'name': ReconfigVM_Task, 'duration_secs': 0.283094} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.035302] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfigured VM instance instance-0000005c to attach disk [datastore1] bc75898d-7856-4ecb-9640-ec30538fe90f/bc75898d-7856-4ecb-9640-ec30538fe90f.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.035985] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1002.035985] env[62109]: value = "task-1116977" [ 1002.035985] env[62109]: _type = "Task" [ 1002.035985] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.036419] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6f3263a1-4e81-4d56-84d5-8e7d10d4da3d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.044282] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5253a832-276f-44f1-c67b-45ab1912449a, 'name': SearchDatastore_Task, 'duration_secs': 0.01637} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.044830] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.045125] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] f72ca981-1bba-44d9-854f-7677f1a0c764/f72ca981-1bba-44d9-854f-7677f1a0c764.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1002.045653] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f3f7de0-1d0e-4bbb-9824-f5ed412a2970 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.048952] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1002.048952] env[62109]: value = "task-1116978" [ 1002.048952] env[62109]: _type = "Task" [ 1002.048952] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.051730] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116977, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.055815] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1002.055815] env[62109]: value = "task-1116979" [ 1002.055815] env[62109]: _type = "Task" [ 1002.055815] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.061919] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116978, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.066655] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116979, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.195621] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9841e843-5f7b-4a03-992b-9a984caa2ac2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.216090] env[62109]: DEBUG nova.objects.base [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Object Instance<6b5a009e-28f5-4be7-8641-089abe359954> lazy-loaded attributes: flavor,pci_requests {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1002.216287] env[62109]: DEBUG nova.network.neutron [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1002.219200] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176ab7c1-5aae-4df7-b7d5-920d6082e7b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.226918] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance 'dfebeee8-06be-424b-89b0-7c1a3d4703eb' progress to 83 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1002.287007] env[62109]: DEBUG nova.policy [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 
tempest-AttachInterfacesTestJSON-1388106985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491fd4e791924dafb155dd356bf20aa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6ee24c114bd495e8f29eeda1f6b8bba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1002.319744] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1116976, 'name': CreateVM_Task, 'duration_secs': 0.398254} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.319984] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1002.320646] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.320858] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.321256] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1002.321519] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4fd9b8d-3259-4627-b5d1-b5f6c860f8c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.326038] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1002.326038] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52600d41-9730-65b8-eb8a-e73ee2248aca" [ 1002.326038] env[62109]: _type = "Task" [ 1002.326038] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.333590] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52600d41-9730-65b8-eb8a-e73ee2248aca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.366035] env[62109]: DEBUG nova.scheduler.client.report [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1002.505918] env[62109]: INFO nova.compute.manager [-] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Took 1.23 seconds to deallocate network for instance. [ 1002.548835] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116977, 'name': ReconfigVM_Task, 'duration_secs': 0.178476} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.549210] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244509', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'name': 'volume-52569525-8081-4e16-8b50-2801101cb579', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5842e112-d3ef-4ce9-91cc-198e68d12422', 'attached_at': '', 'detached_at': '', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'serial': '52569525-8081-4e16-8b50-2801101cb579'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1002.550050] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c075d05d-8bc7-4738-8d3b-6ae3d19c4dfe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.566104] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116978, 'name': Rename_Task, 'duration_secs': 0.151217} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.566493] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1002.566493] env[62109]: value = "task-1116980" [ 1002.566493] env[62109]: _type = "Task" [ 1002.566493] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.567265] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1002.567685] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f4d44d0-44ef-46a8-95b5-d444ebfc21b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.575387] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116979, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.581303] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116980, 'name': Rename_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.583468] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1002.583468] env[62109]: value = "task-1116981" [ 1002.583468] env[62109]: _type = "Task" [ 1002.583468] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.599184] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116981, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.734053] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1002.734416] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b236a80d-15e9-4b8e-88d1-d0c05eba4391 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.743212] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1002.743212] env[62109]: value = "task-1116982" [ 1002.743212] env[62109]: _type = "Task" [ 1002.743212] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.752324] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116982, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.835838] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52600d41-9730-65b8-eb8a-e73ee2248aca, 'name': SearchDatastore_Task, 'duration_secs': 0.046132} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.836176] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.836411] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1002.836650] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.836802] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.836985] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.837268] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11b296bd-4cf0-4996-852a-3eee98ad8a08 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.853766] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
1002.854143] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1002.855112] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8df23273-2a8d-47cc-ad81-794e4f62f7d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.860736] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1002.860736] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52af6336-47b2-614e-05eb-51db8c1a0903" [ 1002.860736] env[62109]: _type = "Task" [ 1002.860736] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.868547] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52af6336-47b2-614e-05eb-51db8c1a0903, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.870422] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.825s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.892600] env[62109]: INFO nova.scheduler.client.report [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Deleted allocations for instance 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5 [ 1002.993412] env[62109]: DEBUG nova.compute.manager [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Received event network-vif-plugged-879d4990-6187-4722-b8c5-0c20f9fa59cc {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1002.993614] env[62109]: DEBUG oslo_concurrency.lockutils [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] Acquiring lock "2fddcd6c-241e-4591-acec-12487909355c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.993825] env[62109]: DEBUG oslo_concurrency.lockutils [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] Lock "2fddcd6c-241e-4591-acec-12487909355c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.993999] env[62109]: DEBUG oslo_concurrency.lockutils 
[req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] Lock "2fddcd6c-241e-4591-acec-12487909355c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.994189] env[62109]: DEBUG nova.compute.manager [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] [instance: 2fddcd6c-241e-4591-acec-12487909355c] No waiting events found dispatching network-vif-plugged-879d4990-6187-4722-b8c5-0c20f9fa59cc {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1002.994364] env[62109]: WARNING nova.compute.manager [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Received unexpected event network-vif-plugged-879d4990-6187-4722-b8c5-0c20f9fa59cc for instance with vm_state building and task_state spawning. [ 1002.994597] env[62109]: DEBUG nova.compute.manager [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Received event network-changed-879d4990-6187-4722-b8c5-0c20f9fa59cc {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1002.994778] env[62109]: DEBUG nova.compute.manager [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Refreshing instance network info cache due to event network-changed-879d4990-6187-4722-b8c5-0c20f9fa59cc. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1002.994967] env[62109]: DEBUG oslo_concurrency.lockutils [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] Acquiring lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.995125] env[62109]: DEBUG oslo_concurrency.lockutils [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] Acquired lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.995336] env[62109]: DEBUG nova.network.neutron [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Refreshing network info cache for port 879d4990-6187-4722-b8c5-0c20f9fa59cc {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1003.014547] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.014820] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.015078] env[62109]: DEBUG nova.objects.instance [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lazy-loading 'resources' on Instance uuid dce54763-ad3a-40d3-8f72-f0a1aefaf086 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.067015] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116979, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.625893} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.067421] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] f72ca981-1bba-44d9-854f-7677f1a0c764/f72ca981-1bba-44d9-854f-7677f1a0c764.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1003.067698] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1003.068127] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e25bfdea-2329-4973-8724-ae0f88c45c63 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.079593] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116980, 'name': Rename_Task, 'duration_secs': 0.185832} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.081449] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1003.081890] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1003.081890] env[62109]: value = "task-1116983" [ 1003.081890] env[62109]: _type = "Task" [ 1003.081890] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.082173] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86ed9698-cdd8-4d48-95ca-4aec5507d90b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.096461] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.096891] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.097257] env[62109]: INFO nova.compute.manager [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Attaching volume 0ab70d08-11b8-4801-8d13-3c142199f3d4 to /dev/sdb [ 1003.099619] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1003.099619] env[62109]: value = "task-1116984" [ 1003.099619] env[62109]: _type = "Task" [ 1003.099619] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.107458] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116983, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.119571] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116981, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.127751] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116984, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.146158] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51901530-4af4-481f-bd05-a1f7eaf5dcba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.153910] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3c7799-5f3c-4fe0-bee5-5beabdf8a35e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.167621] env[62109]: DEBUG nova.virt.block_device [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Updating existing volume attachment record: 0eb94039-f7b0-4a27-9b48-4488f66f3c6f {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1003.252339] env[62109]: DEBUG oslo_vmware.api [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1116982, 'name': PowerOnVM_Task, 'duration_secs': 0.482499} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.252760] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1003.252991] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-efde1154-eaad-448b-a4a1-daf673de71f1 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance 'dfebeee8-06be-424b-89b0-7c1a3d4703eb' progress to 100 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1003.370850] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52af6336-47b2-614e-05eb-51db8c1a0903, 'name': SearchDatastore_Task, 'duration_secs': 0.019425} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.371662] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddf8792c-8020-44e3-ab05-97ae723f6ca8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.376746] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1003.376746] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52cab800-ab59-171c-05c8-5ef178db71d7" [ 1003.376746] env[62109]: _type = "Task" [ 1003.376746] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.383787] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52cab800-ab59-171c-05c8-5ef178db71d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.400120] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd93c299-9ccb-423b-bd9b-72e05d9fb666 tempest-AttachVolumeNegativeTest-2017111171 tempest-AttachVolumeNegativeTest-2017111171-project-member] Lock "414ac48f-68bc-4d37-98c0-4bcc9f7f37c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.727s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.598628] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116983, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094185} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.599266] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1003.600143] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56992993-e653-4d86-9337-0c6b802dac6d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.608176] env[62109]: DEBUG oslo_vmware.api [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1116981, 'name': PowerOnVM_Task, 'duration_secs': 0.53684} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.608917] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1003.609141] env[62109]: INFO nova.compute.manager [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Took 9.00 seconds to spawn the instance on the hypervisor. 
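The task-handling records above ("Waiting for the task ... to complete", "progress is N%", "completed successfully", duration_secs) all come from oslo.vmware's task polling. As a rough illustration only, and not a copy of Nova's own vm_util helpers, the sketch below shows the usual pattern: invoke_api() issues the SOAP call through the session and returns a task reference, and wait_for_task() polls it until it finishes. The reconfigure_and_wait name and the vm_ref/spec arguments are hypothetical placeholders; the session is assumed to be an existing oslo_vmware.api.VMwareAPISession created elsewhere.

def reconfigure_and_wait(session, vm_ref, spec):
    # Issue the SOAP call via the session's vim client; this is the step the
    # "Invoking VirtualMachine.ReconfigVM_Task with opID=..." lines record.
    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
    # wait_for_task() polls the server-side task state; the recurring
    # "progress is N%" lines come from that polling loop, and
    # "completed successfully" is logged once the task reaches 'success'.
    # A task that ends in 'error' is surfaced as an oslo_vmware exception.
    return session.wait_for_task(task)
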
[ 1003.609424] env[62109]: DEBUG nova.compute.manager [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1003.621991] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e664653-1aa5-497d-96f7-302874a50145 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.632720] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] f72ca981-1bba-44d9-854f-7677f1a0c764/f72ca981-1bba-44d9-854f-7677f1a0c764.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1003.638248] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0715e871-f90c-45b3-b043-9bf81ad97f21 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.657120] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116984, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.664624] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1003.664624] env[62109]: value = "task-1116986" [ 1003.664624] env[62109]: _type = "Task" [ 1003.664624] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.677713] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116986, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.767480] env[62109]: DEBUG nova.network.neutron [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updated VIF entry in instance network info cache for port 879d4990-6187-4722-b8c5-0c20f9fa59cc. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1003.767480] env[62109]: DEBUG nova.network.neutron [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating instance_info_cache with network_info: [{"id": "879d4990-6187-4722-b8c5-0c20f9fa59cc", "address": "fa:16:3e:60:5c:8d", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap879d4990-61", "ovs_interfaceid": "879d4990-6187-4722-b8c5-0c20f9fa59cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.796785] env[62109]: DEBUG nova.network.neutron [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Successfully updated port: fdebe937-16f4-47b6-982f-2a88b25aa054 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1003.799542] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5835f7ff-fec3-450b-afda-c509b614fb29 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.808647] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0630b0-11c0-456e-8a68-896058a3298d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.841288] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ac438d-371f-4e97-b669-c459d32c69f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.849206] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0e0162-45f9-4cb2-90e8-fa01da05ce10 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.863845] env[62109]: DEBUG nova.compute.provider_tree [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.887229] env[62109]: DEBUG oslo_vmware.api [None 
req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52cab800-ab59-171c-05c8-5ef178db71d7, 'name': SearchDatastore_Task, 'duration_secs': 0.033032} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.887500] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.887776] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 2fddcd6c-241e-4591-acec-12487909355c/2fddcd6c-241e-4591-acec-12487909355c.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1003.888070] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b54a3eb-c2f3-4f90-9da2-e1250de4060f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.894779] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1003.894779] env[62109]: value = "task-1116987" [ 1003.894779] env[62109]: _type = "Task" [ 1003.894779] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.902841] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116987, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.119635] env[62109]: DEBUG oslo_vmware.api [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116984, 'name': PowerOnVM_Task, 'duration_secs': 0.874204} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.119983] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1004.120206] env[62109]: DEBUG nova.compute.manager [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1004.120987] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895f5805-28eb-4102-af82-0122fe79aeac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.176460] env[62109]: INFO nova.compute.manager [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Took 19.65 seconds to build instance. [ 1004.181795] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116986, 'name': ReconfigVM_Task, 'duration_secs': 0.411557} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.182127] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Reconfigured VM instance instance-0000005d to attach disk [datastore1] f72ca981-1bba-44d9-854f-7677f1a0c764/f72ca981-1bba-44d9-854f-7677f1a0c764.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1004.182986] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb0fc9e2-60a8-4c89-99a6-8ce28e4fab06 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.191581] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1004.191581] env[62109]: value = "task-1116988" [ 1004.191581] env[62109]: _type = "Task" [ 1004.191581] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.203699] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116988, 'name': Rename_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.268142] env[62109]: DEBUG oslo_concurrency.lockutils [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] Releasing lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.268142] env[62109]: DEBUG nova.compute.manager [req-0dc3ba3f-06d8-43b0-88da-bf719490f1b1 req-73fbd514-917a-48c0-ad06-7e511cd0d83a service nova] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Received event network-vif-deleted-c2bbdfe0-c3a1-4830-9c31-2bc6645a0ab9 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1004.304210] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.304534] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.304742] env[62109]: DEBUG nova.network.neutron [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1004.368600] env[62109]: DEBUG nova.scheduler.client.report [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1004.405899] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116987, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.636179] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.679040] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4f882d6e-ea94-49ad-a322-fe0ce7af5c64 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.162s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.701640] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116988, 'name': Rename_Task, 'duration_secs': 0.450819} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.704142] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1004.704142] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-717cafcb-5b55-4fd6-b478-5640979e71b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.712020] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1004.712020] env[62109]: value = "task-1116990" [ 1004.712020] env[62109]: _type = "Task" [ 1004.712020] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.718181] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116990, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.857461] env[62109]: WARNING nova.network.neutron [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] cfbec6c5-3421-476e-aca8-de96e0de15af already exists in list: networks containing: ['cfbec6c5-3421-476e-aca8-de96e0de15af']. 
ignoring it [ 1004.872497] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.858s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.874597] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.239s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.874792] env[62109]: DEBUG nova.objects.instance [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1004.906147] env[62109]: INFO nova.scheduler.client.report [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Deleted allocations for instance dce54763-ad3a-40d3-8f72-f0a1aefaf086 [ 1004.916817] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116987, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.633285} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.917313] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 2fddcd6c-241e-4591-acec-12487909355c/2fddcd6c-241e-4591-acec-12487909355c.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1004.917537] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1004.918360] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83aeda53-c2c4-4542-9c0d-7ea66c29e0c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.924472] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1004.924472] env[62109]: value = "task-1116991" [ 1004.924472] env[62109]: _type = "Task" [ 1004.924472] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.933019] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116991, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.105918] env[62109]: DEBUG oslo_concurrency.lockutils [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.106425] env[62109]: DEBUG oslo_concurrency.lockutils [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.106634] env[62109]: DEBUG nova.compute.manager [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Going to confirm migration 4 {{(pid=62109) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1005.135605] env[62109]: DEBUG nova.compute.manager [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Received event network-vif-plugged-fdebe937-16f4-47b6-982f-2a88b25aa054 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1005.135833] env[62109]: DEBUG oslo_concurrency.lockutils [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] Acquiring lock "6b5a009e-28f5-4be7-8641-089abe359954-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.136054] env[62109]: DEBUG oslo_concurrency.lockutils [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] Lock "6b5a009e-28f5-4be7-8641-089abe359954-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.136225] env[62109]: DEBUG oslo_concurrency.lockutils [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] Lock "6b5a009e-28f5-4be7-8641-089abe359954-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.136398] env[62109]: DEBUG nova.compute.manager [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] No waiting events found dispatching 
network-vif-plugged-fdebe937-16f4-47b6-982f-2a88b25aa054 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1005.136562] env[62109]: WARNING nova.compute.manager [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Received unexpected event network-vif-plugged-fdebe937-16f4-47b6-982f-2a88b25aa054 for instance with vm_state active and task_state None. [ 1005.136729] env[62109]: DEBUG nova.compute.manager [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Received event network-changed-fdebe937-16f4-47b6-982f-2a88b25aa054 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1005.136945] env[62109]: DEBUG nova.compute.manager [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Refreshing instance network info cache due to event network-changed-fdebe937-16f4-47b6-982f-2a88b25aa054. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1005.137058] env[62109]: DEBUG oslo_concurrency.lockutils [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] Acquiring lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.219789] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116990, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.421615] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9be08f43-7970-4640-9eb1-22e1a97e344f tempest-ImagesTestJSON-1347400972 tempest-ImagesTestJSON-1347400972-project-member] Lock "dce54763-ad3a-40d3-8f72-f0a1aefaf086" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.329s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.433233] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116991, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065754} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.434209] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1005.434856] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f68c961-1541-4bc6-81eb-95da4d9c6604 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.458547] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 2fddcd6c-241e-4591-acec-12487909355c/2fddcd6c-241e-4591-acec-12487909355c.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.459238] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2e5a257-ea85-44ee-b744-fc05fb47e6b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.480502] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1005.480502] env[62109]: value = "task-1116993" [ 1005.480502] env[62109]: _type = "Task" [ 1005.480502] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.493285] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116993, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.628568] env[62109]: DEBUG nova.network.neutron [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updating instance_info_cache with network_info: [{"id": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "address": "fa:16:3e:1b:aa:bb", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fbaff0-8e", "ovs_interfaceid": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fdebe937-16f4-47b6-982f-2a88b25aa054", "address": "fa:16:3e:41:30:37", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdebe937-16", "ovs_interfaceid": "fdebe937-16f4-47b6-982f-2a88b25aa054", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.682332] env[62109]: DEBUG oslo_concurrency.lockutils [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.682568] env[62109]: DEBUG oslo_concurrency.lockutils [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock 
"refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.682756] env[62109]: DEBUG nova.network.neutron [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1005.682950] env[62109]: DEBUG nova.objects.instance [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lazy-loading 'info_cache' on Instance uuid dfebeee8-06be-424b-89b0-7c1a3d4703eb {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.722105] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116990, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.883522] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9a906c53-390c-4330-b8c3-df64c4df0e30 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.991856] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116993, 'name': ReconfigVM_Task, 'duration_secs': 0.492342} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.992326] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 2fddcd6c-241e-4591-acec-12487909355c/2fddcd6c-241e-4591-acec-12487909355c.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1005.993113] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-877ac497-aad3-4b19-90ab-e078d27000d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.999416] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1005.999416] env[62109]: value = "task-1116994" [ 1005.999416] env[62109]: _type = "Task" [ 1005.999416] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.008613] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116994, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.131760] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.132514] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.132683] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.132975] env[62109]: DEBUG oslo_concurrency.lockutils [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] Acquired lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.133192] env[62109]: DEBUG nova.network.neutron [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Refreshing network info cache for port fdebe937-16f4-47b6-982f-2a88b25aa054 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1006.135027] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f226cb-42ee-403a-bc03-9d7470f130b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.154543] env[62109]: DEBUG nova.virt.hardware [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1006.154792] env[62109]: DEBUG nova.virt.hardware [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 
tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1006.154958] env[62109]: DEBUG nova.virt.hardware [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1006.155164] env[62109]: DEBUG nova.virt.hardware [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1006.155318] env[62109]: DEBUG nova.virt.hardware [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1006.155469] env[62109]: DEBUG nova.virt.hardware [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1006.155676] env[62109]: DEBUG nova.virt.hardware [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1006.155838] env[62109]: DEBUG nova.virt.hardware [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1006.156013] env[62109]: DEBUG nova.virt.hardware [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1006.156195] env[62109]: DEBUG nova.virt.hardware [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1006.156431] env[62109]: DEBUG nova.virt.hardware [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1006.162748] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 
6b5a009e-28f5-4be7-8641-089abe359954] Reconfiguring VM to attach interface {{(pid=62109) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 1006.163719] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63dbaca2-e53a-4e67-a60b-a9a987656eac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.181787] env[62109]: DEBUG oslo_vmware.api [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 1006.181787] env[62109]: value = "task-1116995" [ 1006.181787] env[62109]: _type = "Task" [ 1006.181787] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.191721] env[62109]: DEBUG oslo_vmware.api [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116995, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.221477] env[62109]: DEBUG oslo_vmware.api [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116990, 'name': PowerOnVM_Task, 'duration_secs': 1.18302} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.221906] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1006.221957] env[62109]: INFO nova.compute.manager [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Took 9.17 seconds to spawn the instance on the hypervisor. [ 1006.222151] env[62109]: DEBUG nova.compute.manager [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1006.222988] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73bd85d-7cdd-4368-97e3-e561ea097084 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.511935] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116994, 'name': Rename_Task, 'duration_secs': 0.287501} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.511935] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1006.512128] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eea35375-e748-4600-b0a2-905f47b9a4cd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.518276] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1006.518276] env[62109]: value = "task-1116996" [ 1006.518276] env[62109]: _type = "Task" [ 1006.518276] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.526388] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116996, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.693944] env[62109]: DEBUG oslo_vmware.api [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116995, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.744727] env[62109]: INFO nova.compute.manager [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Took 21.71 seconds to build instance. [ 1006.854981] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "5842e112-d3ef-4ce9-91cc-198e68d12422" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.855350] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.881262] env[62109]: DEBUG nova.network.neutron [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updated VIF entry in instance network info cache for port fdebe937-16f4-47b6-982f-2a88b25aa054. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1006.881815] env[62109]: DEBUG nova.network.neutron [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updating instance_info_cache with network_info: [{"id": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "address": "fa:16:3e:1b:aa:bb", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fbaff0-8e", "ovs_interfaceid": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fdebe937-16f4-47b6-982f-2a88b25aa054", "address": "fa:16:3e:41:30:37", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdebe937-16", "ovs_interfaceid": "fdebe937-16f4-47b6-982f-2a88b25aa054", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.032461] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116996, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.123521] env[62109]: DEBUG nova.network.neutron [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance_info_cache with network_info: [{"id": "e5781b49-4005-4203-8390-dc6af21b6eda", "address": "fa:16:3e:73:53:db", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5781b49-40", "ovs_interfaceid": "e5781b49-4005-4203-8390-dc6af21b6eda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.193263] env[62109]: DEBUG oslo_vmware.api [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1116995, 'name': ReconfigVM_Task, 'duration_secs': 0.856588} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.193784] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.194046] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Reconfigured VM to attach interface {{(pid=62109) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 1007.248407] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6accf5e8-939f-4aa3-ad6a-a095b452157a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "f72ca981-1bba-44d9-854f-7677f1a0c764" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.226s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.345456] env[62109]: DEBUG nova.compute.manager [req-8e145f30-681a-4aa5-94dc-cf8489b57428 req-fdfaa63d-fa65-4b39-ada5-483c57dca9fa service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Received event network-changed-8b33420f-fd6d-43fc-b4e1-141768c6024b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1007.345663] env[62109]: DEBUG nova.compute.manager [req-8e145f30-681a-4aa5-94dc-cf8489b57428 req-fdfaa63d-fa65-4b39-ada5-483c57dca9fa service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Refreshing instance network info cache due to event network-changed-8b33420f-fd6d-43fc-b4e1-141768c6024b. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1007.345886] env[62109]: DEBUG oslo_concurrency.lockutils [req-8e145f30-681a-4aa5-94dc-cf8489b57428 req-fdfaa63d-fa65-4b39-ada5-483c57dca9fa service nova] Acquiring lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.346145] env[62109]: DEBUG oslo_concurrency.lockutils [req-8e145f30-681a-4aa5-94dc-cf8489b57428 req-fdfaa63d-fa65-4b39-ada5-483c57dca9fa service nova] Acquired lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.346379] env[62109]: DEBUG nova.network.neutron [req-8e145f30-681a-4aa5-94dc-cf8489b57428 req-fdfaa63d-fa65-4b39-ada5-483c57dca9fa service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Refreshing network info cache for port 8b33420f-fd6d-43fc-b4e1-141768c6024b {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1007.359170] env[62109]: INFO nova.compute.manager [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Detaching volume 52569525-8081-4e16-8b50-2801101cb579 [ 1007.385181] env[62109]: DEBUG oslo_concurrency.lockutils [req-adc92152-32fa-4491-9d3f-b66f664fa4ce req-a31d627c-d7e3-44e2-9aa1-5ee153e57ff8 service nova] Releasing lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.399025] env[62109]: INFO nova.virt.block_device [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Attempting to driver detach volume 52569525-8081-4e16-8b50-2801101cb579 from mountpoint /dev/sdb [ 1007.399025] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Volume detach. 
Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1007.399025] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244509', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'name': 'volume-52569525-8081-4e16-8b50-2801101cb579', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5842e112-d3ef-4ce9-91cc-198e68d12422', 'attached_at': '', 'detached_at': '', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'serial': '52569525-8081-4e16-8b50-2801101cb579'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1007.399025] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d9819f-88d5-43b6-aaa4-f409c416a47a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.420974] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75b104c-36ca-42b2-906e-6e3a26453482 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.428636] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e756a895-a4b2-402f-a1fe-41f57ce0ac66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.448736] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-858b36d3-2fc2-4975-b73c-3d894f48ce68 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.464560] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] The volume has not been displaced from its original location: [datastore2] volume-52569525-8081-4e16-8b50-2801101cb579/volume-52569525-8081-4e16-8b50-2801101cb579.vmdk. No consolidation needed. 
{{(pid=62109) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1007.470138] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Reconfiguring VM instance instance-00000051 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1007.470498] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7e32f49-49c7-4f61-bb20-62b4515489ac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.488736] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "f72ca981-1bba-44d9-854f-7677f1a0c764" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.488736] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "f72ca981-1bba-44d9-854f-7677f1a0c764" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.488993] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "f72ca981-1bba-44d9-854f-7677f1a0c764-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.490602] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "f72ca981-1bba-44d9-854f-7677f1a0c764-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.490602] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "f72ca981-1bba-44d9-854f-7677f1a0c764-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.492390] env[62109]: INFO nova.compute.manager [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Terminating instance [ 1007.494461] env[62109]: DEBUG nova.compute.manager [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 
tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1007.494575] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1007.496601] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed33e156-00e6-4785-b14d-ea3c8e6f62ab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.500440] env[62109]: DEBUG oslo_vmware.api [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1007.500440] env[62109]: value = "task-1116997" [ 1007.500440] env[62109]: _type = "Task" [ 1007.500440] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.509389] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1007.510134] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ebf20a47-4111-47da-af68-a595799b67bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.516044] env[62109]: DEBUG oslo_vmware.api [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116997, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.520172] env[62109]: DEBUG oslo_vmware.api [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1007.520172] env[62109]: value = "task-1116998" [ 1007.520172] env[62109]: _type = "Task" [ 1007.520172] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.532046] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116996, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.535182] env[62109]: DEBUG oslo_vmware.api [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116998, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.627333] env[62109]: DEBUG oslo_concurrency.lockutils [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "refresh_cache-dfebeee8-06be-424b-89b0-7c1a3d4703eb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.627647] env[62109]: DEBUG nova.objects.instance [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lazy-loading 'migration_context' on Instance uuid dfebeee8-06be-424b-89b0-7c1a3d4703eb {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.699735] env[62109]: DEBUG oslo_concurrency.lockutils [None req-81376f89-1f7d-4afa-96d3-be2c76327af9 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-6b5a009e-28f5-4be7-8641-089abe359954-fdebe937-16f4-47b6-982f-2a88b25aa054" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.608s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.723676] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Volume attach. Driver type: vmdk {{(pid=62109) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1007.723676] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244518', 'volume_id': '0ab70d08-11b8-4801-8d13-3c142199f3d4', 'name': 'volume-0ab70d08-11b8-4801-8d13-3c142199f3d4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a197a73e-32bc-45b0-ae6f-5275cf74285b', 'attached_at': '', 'detached_at': '', 'volume_id': '0ab70d08-11b8-4801-8d13-3c142199f3d4', 'serial': '0ab70d08-11b8-4801-8d13-3c142199f3d4'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1007.724987] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115750b3-f905-4be6-af83-252c82807dab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.742520] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2394ad33-7b82-44cf-a9eb-cf4685930968 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.772017] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] volume-0ab70d08-11b8-4801-8d13-3c142199f3d4/volume-0ab70d08-11b8-4801-8d13-3c142199f3d4.vmdk or 
device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1007.772403] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ea35f3f-1ff2-40d2-8351-ebfc67d4dc38 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.790609] env[62109]: DEBUG oslo_vmware.api [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 1007.790609] env[62109]: value = "task-1117000" [ 1007.790609] env[62109]: _type = "Task" [ 1007.790609] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.799488] env[62109]: DEBUG oslo_vmware.api [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117000, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.010523] env[62109]: DEBUG oslo_vmware.api [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1116997, 'name': ReconfigVM_Task, 'duration_secs': 0.302806} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.010801] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Reconfigured VM instance instance-00000051 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1008.015429] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60b2992f-3c1a-4376-aaa0-446589b7809e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.042663] env[62109]: DEBUG oslo_vmware.api [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1116996, 'name': PowerOnVM_Task, 'duration_secs': 1.237547} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.042912] env[62109]: DEBUG oslo_vmware.api [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1116998, 'name': PowerOffVM_Task, 'duration_secs': 0.347734} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.044431] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1008.044647] env[62109]: INFO nova.compute.manager [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Took 8.66 seconds to spawn the instance on the hypervisor. [ 1008.044841] env[62109]: DEBUG nova.compute.manager [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1008.045141] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1008.045318] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1008.045591] env[62109]: DEBUG oslo_vmware.api [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1008.045591] env[62109]: value = "task-1117001" [ 1008.045591] env[62109]: _type = "Task" [ 1008.045591] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.046748] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66e0059-9be7-4cac-84f2-2e6aecfdbe63 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.049277] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c1969ca3-751a-433f-a8dd-955700d9b613 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.061521] env[62109]: DEBUG oslo_vmware.api [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117001, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.131334] env[62109]: DEBUG nova.objects.base [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1008.132328] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e35a515-4347-4e02-81c1-797239789778 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.157113] env[62109]: DEBUG nova.network.neutron [req-8e145f30-681a-4aa5-94dc-cf8489b57428 req-fdfaa63d-fa65-4b39-ada5-483c57dca9fa service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updated VIF entry in instance network info cache for port 8b33420f-fd6d-43fc-b4e1-141768c6024b. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1008.157506] env[62109]: DEBUG nova.network.neutron [req-8e145f30-681a-4aa5-94dc-cf8489b57428 req-fdfaa63d-fa65-4b39-ada5-483c57dca9fa service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance_info_cache with network_info: [{"id": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "address": "fa:16:3e:9b:4f:08", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b33420f-fd", "ovs_interfaceid": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.158904] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2809ce17-17fe-4c13-88c7-fad718b09e6d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.165393] env[62109]: DEBUG oslo_vmware.api [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1008.165393] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f190ba-c443-21a8-1a9b-93969f9c00b7" [ 1008.165393] env[62109]: _type = "Task" [ 1008.165393] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.174396] env[62109]: DEBUG oslo_vmware.api [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f190ba-c443-21a8-1a9b-93969f9c00b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.302086] env[62109]: DEBUG oslo_vmware.api [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117000, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.560645] env[62109]: DEBUG oslo_vmware.api [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117001, 'name': ReconfigVM_Task, 'duration_secs': 0.163054} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.561018] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244509', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'name': 'volume-52569525-8081-4e16-8b50-2801101cb579', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5842e112-d3ef-4ce9-91cc-198e68d12422', 'attached_at': '', 'detached_at': '', 'volume_id': '52569525-8081-4e16-8b50-2801101cb579', 'serial': '52569525-8081-4e16-8b50-2801101cb579'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1008.575668] env[62109]: INFO nova.compute.manager [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Took 18.61 seconds to build instance. [ 1008.663261] env[62109]: DEBUG oslo_concurrency.lockutils [req-8e145f30-681a-4aa5-94dc-cf8489b57428 req-fdfaa63d-fa65-4b39-ada5-483c57dca9fa service nova] Releasing lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.677927] env[62109]: DEBUG oslo_vmware.api [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f190ba-c443-21a8-1a9b-93969f9c00b7, 'name': SearchDatastore_Task, 'duration_secs': 0.024241} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.678282] env[62109]: DEBUG oslo_concurrency.lockutils [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.678539] env[62109]: DEBUG oslo_concurrency.lockutils [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.790180] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1008.790180] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1008.790491] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Deleting the datastore file [datastore1] f72ca981-1bba-44d9-854f-7677f1a0c764 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.790729] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0966ed81-37b9-4d44-a8d2-205644f5cccf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.804028] env[62109]: DEBUG oslo_vmware.api [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1008.804028] env[62109]: value = "task-1117003" [ 1008.804028] env[62109]: _type = "Task" [ 1008.804028] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.808108] env[62109]: DEBUG oslo_vmware.api [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117000, 'name': ReconfigVM_Task, 'duration_secs': 0.791354} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.811611] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Reconfigured VM instance instance-00000055 to attach disk [datastore2] volume-0ab70d08-11b8-4801-8d13-3c142199f3d4/volume-0ab70d08-11b8-4801-8d13-3c142199f3d4.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.817564] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b59309b9-8460-4e72-8c08-6f4e58c50fd7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.835889] env[62109]: DEBUG oslo_vmware.api [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117003, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.837650] env[62109]: DEBUG oslo_vmware.api [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 1008.837650] env[62109]: value = "task-1117004" [ 1008.837650] env[62109]: _type = "Task" [ 1008.837650] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.848673] env[62109]: DEBUG oslo_vmware.api [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117004, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.077079] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e7e2b528-cd68-4cd0-966a-861241da1f24 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "2fddcd6c-241e-4591-acec-12487909355c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.117s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.116683] env[62109]: DEBUG nova.objects.instance [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lazy-loading 'flavor' on Instance uuid 5842e112-d3ef-4ce9-91cc-198e68d12422 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1009.304934] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "interface-6b5a009e-28f5-4be7-8641-089abe359954-fdebe937-16f4-47b6-982f-2a88b25aa054" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.306832] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-6b5a009e-28f5-4be7-8641-089abe359954-fdebe937-16f4-47b6-982f-2a88b25aa054" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.322371] env[62109]: DEBUG oslo_vmware.api [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117003, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.279014} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.326367] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.326772] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1009.327728] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1009.327728] env[62109]: INFO nova.compute.manager [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Took 1.83 seconds to destroy the instance on the hypervisor. [ 1009.327728] env[62109]: DEBUG oslo.service.loopingcall [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1009.328367] env[62109]: DEBUG nova.compute.manager [-] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1009.328448] env[62109]: DEBUG nova.network.neutron [-] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1009.353063] env[62109]: DEBUG oslo_vmware.api [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117004, 'name': ReconfigVM_Task, 'duration_secs': 0.157073} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.353448] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244518', 'volume_id': '0ab70d08-11b8-4801-8d13-3c142199f3d4', 'name': 'volume-0ab70d08-11b8-4801-8d13-3c142199f3d4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a197a73e-32bc-45b0-ae6f-5275cf74285b', 'attached_at': '', 'detached_at': '', 'volume_id': '0ab70d08-11b8-4801-8d13-3c142199f3d4', 'serial': '0ab70d08-11b8-4801-8d13-3c142199f3d4'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1009.430663] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747d5711-0228-48eb-a133-2ded8f227657 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.439055] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6eb995-41a7-4ac1-839f-d7a596f55596 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.469297] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8428d399-cbd1-4397-9ec4-7c99ce8e788f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.477775] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25baec0-5218-4371-9e2f-17043d37f6fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.493294] env[62109]: DEBUG nova.compute.provider_tree [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.742047] env[62109]: DEBUG nova.compute.manager [req-df9150d1-9750-4e1e-b3ac-2c33ce40cfb2 req-62319b24-bfa1-4bcd-94fa-756a0bebcabd service nova] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Received event network-vif-deleted-86b53aab-cc69-467f-be7d-15ca2b456351 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1009.742274] env[62109]: INFO nova.compute.manager [req-df9150d1-9750-4e1e-b3ac-2c33ce40cfb2 req-62319b24-bfa1-4bcd-94fa-756a0bebcabd service nova] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Neutron deleted interface 86b53aab-cc69-467f-be7d-15ca2b456351; detaching it from the instance and deleting it from the info cache [ 1009.742475] env[62109]: DEBUG nova.network.neutron [req-df9150d1-9750-4e1e-b3ac-2c33ce40cfb2 req-62319b24-bfa1-4bcd-94fa-756a0bebcabd service nova] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.810652] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.810652] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.812013] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e31f75-b7d3-484f-bddf-d3e7ac21965c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.831997] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc210bb-c3b6-47fc-ba76-bd605a2e00fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.866600] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Reconfiguring VM to detach interface {{(pid=62109) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 1009.868338] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3db85c72-8cdd-42b5-9110-78d26375e1a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.888498] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 1009.888498] env[62109]: value = "task-1117005" [ 1009.888498] env[62109]: _type = "Task" [ 1009.888498] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.897328] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117005, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.996164] env[62109]: DEBUG nova.scheduler.client.report [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1010.123745] env[62109]: DEBUG oslo_concurrency.lockutils [None req-64db641d-9118-4773-9199-72f02e0f4952 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.268s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.208483] env[62109]: DEBUG nova.compute.manager [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Stashing vm_state: active {{(pid=62109) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1010.217562] env[62109]: DEBUG nova.network.neutron [-] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.244799] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fafd90ad-cf6c-4ccb-9adb-dd3ad6e5e182 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.255158] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928809c5-5d02-459d-8fd8-3c4042ab3f0d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.288319] env[62109]: DEBUG nova.compute.manager [req-df9150d1-9750-4e1e-b3ac-2c33ce40cfb2 req-62319b24-bfa1-4bcd-94fa-756a0bebcabd service nova] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Detach interface failed, port_id=86b53aab-cc69-467f-be7d-15ca2b456351, reason: Instance f72ca981-1bba-44d9-854f-7677f1a0c764 could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1010.338379] env[62109]: DEBUG oslo_concurrency.lockutils [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "5842e112-d3ef-4ce9-91cc-198e68d12422" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.338727] env[62109]: DEBUG oslo_concurrency.lockutils [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.338966] env[62109]: DEBUG oslo_concurrency.lockutils [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "5842e112-d3ef-4ce9-91cc-198e68d12422-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.339191] env[62109]: DEBUG oslo_concurrency.lockutils [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.339376] env[62109]: DEBUG oslo_concurrency.lockutils [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.341761] env[62109]: INFO nova.compute.manager [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Terminating instance [ 1010.343782] env[62109]: DEBUG nova.compute.manager [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1010.343981] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1010.344907] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353bbe5e-9b6e-4772-9ae7-d18c165fce5e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.353712] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1010.354014] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-901973d4-b204-4569-8f97-d02c65c2ba93 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.360613] env[62109]: DEBUG oslo_vmware.api [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1010.360613] env[62109]: value = "task-1117006" [ 1010.360613] env[62109]: _type = "Task" [ 1010.360613] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.369152] env[62109]: DEBUG oslo_vmware.api [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117006, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.399694] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117005, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.432521] env[62109]: DEBUG nova.objects.instance [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'flavor' on Instance uuid a197a73e-32bc-45b0-ae6f-5275cf74285b {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.721023] env[62109]: INFO nova.compute.manager [-] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Took 1.39 seconds to deallocate network for instance. 
[ 1010.735433] env[62109]: DEBUG oslo_concurrency.lockutils [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.873018] env[62109]: DEBUG oslo_vmware.api [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117006, 'name': PowerOffVM_Task, 'duration_secs': 0.236451} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.873018] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1010.873018] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1010.873018] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ace4bf4-da76-4154-816d-c79405ab69c1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.900290] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117005, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.939896] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6b5dd2ad-4948-4998-a755-9a304540c428 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.843s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.008805] env[62109]: DEBUG oslo_concurrency.lockutils [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.329s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.014603] env[62109]: DEBUG oslo_concurrency.lockutils [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.277s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.228460] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.405691] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117005, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.519727] env[62109]: INFO nova.compute.claims [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1011.584578] env[62109]: INFO nova.scheduler.client.report [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted allocation for migration 31810d44-a7b5-4c62-8d96-6e06b6ac02b1 [ 1011.900866] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117005, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.028609] env[62109]: INFO nova.compute.resource_tracker [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating resource usage from migration 67e3e12d-1802-4d1a-a8d3-cf456396b721 [ 1012.049101] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1012.049347] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1012.049541] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleting the datastore file [datastore1] 5842e112-d3ef-4ce9-91cc-198e68d12422 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1012.049834] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aac461fd-a8da-4e10-83a9-c2da4950081e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.058944] env[62109]: DEBUG oslo_vmware.api [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1012.058944] env[62109]: value = "task-1117008" [ 1012.058944] env[62109]: _type = "Task" [ 1012.058944] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.072923] env[62109]: DEBUG oslo_vmware.api [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117008, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.094434] env[62109]: DEBUG oslo_concurrency.lockutils [None req-40178712-d3cd-45c0-9b3e-63178340db50 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.986s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.249300] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Acquiring lock "f5b81761-6db9-4260-8876-435bac74b027" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.249727] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Lock "f5b81761-6db9-4260-8876-435bac74b027" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.358804] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f194a380-9d3d-4633-9c11-9342a5efd88f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.368995] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803db48f-d368-4c10-839c-01930fbd02ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.408816] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a31541-57c5-4794-9d9a-0400a5c8bb34 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.422521] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117005, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.426793] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b4d78e-027d-488e-bfbb-8cb31ae2d1d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.440254] env[62109]: DEBUG nova.compute.provider_tree [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.545039] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.545039] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.569876] env[62109]: DEBUG oslo_vmware.api [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117008, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.471742} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.571558] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1012.571558] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1012.571558] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1012.571558] env[62109]: INFO nova.compute.manager [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Took 2.23 seconds to destroy the instance on the hypervisor. 
[ 1012.571558] env[62109]: DEBUG oslo.service.loopingcall [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.571558] env[62109]: DEBUG nova.compute.manager [-] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1012.571558] env[62109]: DEBUG nova.network.neutron [-] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1012.751914] env[62109]: DEBUG nova.compute.manager [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1012.916936] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117005, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.943717] env[62109]: DEBUG nova.scheduler.client.report [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1013.047525] env[62109]: DEBUG nova.compute.utils [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1013.090638] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.090912] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.091147] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.091346] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.091685] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.095161] env[62109]: INFO nova.compute.manager [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Terminating instance [ 1013.096261] env[62109]: DEBUG nova.compute.manager [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1013.096334] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1013.097373] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f3d842-84ac-4403-8938-bb648b9cda0e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.105859] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1013.106153] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51636cb8-d1af-4a7d-a42a-2ea0820881ee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.114247] env[62109]: DEBUG oslo_vmware.api [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1013.114247] env[62109]: value = "task-1117009" [ 1013.114247] env[62109]: _type = "Task" [ 1013.114247] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.126832] env[62109]: DEBUG oslo_vmware.api [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117009, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.277991] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.318518] env[62109]: DEBUG nova.compute.manager [req-62405e1b-8c02-461b-945f-d9d1dc349364 req-b490344a-a939-42c8-8960-9d36ceb9bba8 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Received event network-vif-deleted-b4e51bf8-f6dd-4890-81ac-da83edf6812c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1013.318518] env[62109]: INFO nova.compute.manager [req-62405e1b-8c02-461b-945f-d9d1dc349364 req-b490344a-a939-42c8-8960-9d36ceb9bba8 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Neutron deleted interface b4e51bf8-f6dd-4890-81ac-da83edf6812c; detaching it from the instance and deleting it from the info cache [ 1013.318518] env[62109]: DEBUG nova.network.neutron [req-62405e1b-8c02-461b-945f-d9d1dc349364 req-b490344a-a939-42c8-8960-9d36ceb9bba8 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.416699] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117005, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.452052] env[62109]: DEBUG oslo_concurrency.lockutils [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.436s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.452052] env[62109]: INFO nova.compute.manager [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Migrating [ 1013.456630] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.228s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.457454] env[62109]: DEBUG nova.objects.instance [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lazy-loading 'resources' on Instance uuid f72ca981-1bba-44d9-854f-7677f1a0c764 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1013.552312] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.626750] env[62109]: DEBUG oslo_vmware.api [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117009, 'name': PowerOffVM_Task, 'duration_secs': 0.403496} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.626750] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1013.626750] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1013.626750] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89f0d5f6-58aa-439a-86b1-b9ec8a4ffcdb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.648105] env[62109]: DEBUG nova.network.neutron [-] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.683469] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.683469] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.683469] env[62109]: INFO nova.compute.manager [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Rebooting instance [ 1013.705734] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1013.708023] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1013.708023] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleting the datastore file [datastore1] dfebeee8-06be-424b-89b0-7c1a3d4703eb 
{{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1013.708023] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a6795a0a-d9ca-454b-b5cf-af119dcc6619 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.716018] env[62109]: DEBUG oslo_vmware.api [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1013.716018] env[62109]: value = "task-1117011" [ 1013.716018] env[62109]: _type = "Task" [ 1013.716018] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.723034] env[62109]: DEBUG oslo_vmware.api [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117011, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.747529] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "12288104-483b-4bb4-9e33-05bf5d7be3a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.747529] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "12288104-483b-4bb4-9e33-05bf5d7be3a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.824515] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f93213ae-5876-4144-805c-90b55010c5c9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.832523] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c3c8c9-db87-4b5f-850e-a6fa55567b28 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.865539] env[62109]: DEBUG nova.compute.manager [req-62405e1b-8c02-461b-945f-d9d1dc349364 req-b490344a-a939-42c8-8960-9d36ceb9bba8 service nova] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Detach interface failed, port_id=b4e51bf8-f6dd-4890-81ac-da83edf6812c, reason: Instance 5842e112-d3ef-4ce9-91cc-198e68d12422 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1013.916464] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117005, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.974619] env[62109]: DEBUG oslo_concurrency.lockutils [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.974815] env[62109]: DEBUG oslo_concurrency.lockutils [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.974997] env[62109]: DEBUG nova.network.neutron [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1014.151437] env[62109]: INFO nova.compute.manager [-] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] Took 1.58 seconds to deallocate network for instance. [ 1014.175501] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48799162-7da8-497e-9cfc-1744699e1bf6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.185016] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79564a7e-2378-4ad6-8f8e-3e33874fc56d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.220588] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.220786] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.220967] env[62109]: DEBUG nova.network.neutron [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1014.225379] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450347f4-e93f-4ec8-88a4-7a7ecaa4f813 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.233886] env[62109]: DEBUG oslo_vmware.api [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117011, 'name': 
DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.236837] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86759ded-9925-4490-a891-1407b0a6e11b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.251043] env[62109]: DEBUG nova.compute.manager [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1014.253941] env[62109]: DEBUG nova.compute.provider_tree [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.416981] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117005, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.614653] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.614919] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.615177] env[62109]: INFO nova.compute.manager [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Attaching volume c1ea88f9-7236-4370-a00a-5b149ceb966b to /dev/sdc [ 1014.656513] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc800dfe-478f-46b9-bb10-387da6b8aec6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.660746] env[62109]: DEBUG oslo_concurrency.lockutils [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.668474] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-14112293-ae10-486d-af36-355a759b96ea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.681266] env[62109]: DEBUG nova.virt.block_device [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Updating existing volume attachment record: 7751b52f-cff7-400e-830c-d9f5f02b0be7 {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1014.732113] env[62109]: DEBUG oslo_vmware.api [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117011, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.664262} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.732505] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1014.732766] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1014.732962] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1014.733172] env[62109]: INFO nova.compute.manager [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1014.733430] env[62109]: DEBUG oslo.service.loopingcall [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1014.733930] env[62109]: DEBUG nova.compute.manager [-] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1014.734044] env[62109]: DEBUG nova.network.neutron [-] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1014.758747] env[62109]: DEBUG nova.scheduler.client.report [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1014.785758] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.795491] env[62109]: DEBUG nova.network.neutron [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating instance_info_cache with network_info: [{"id": "879d4990-6187-4722-b8c5-0c20f9fa59cc", "address": "fa:16:3e:60:5c:8d", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap879d4990-61", "ovs_interfaceid": "879d4990-6187-4722-b8c5-0c20f9fa59cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.919984] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117005, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.121247] env[62109]: DEBUG nova.network.neutron [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance_info_cache with network_info: [{"id": "14e9f4a1-8980-4de2-88f7-dd0162687351", "address": "fa:16:3e:3f:3e:8b", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14e9f4a1-89", "ovs_interfaceid": "14e9f4a1-8980-4de2-88f7-dd0162687351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.195685] env[62109]: DEBUG nova.compute.manager [req-25f441ca-c392-4132-a23c-c0ddb9d89461 req-e34dc3a1-ee5a-4a64-9f15-50831cc8423d service nova] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Received event network-vif-deleted-e5781b49-4005-4203-8390-dc6af21b6eda {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1015.196123] env[62109]: INFO nova.compute.manager [req-25f441ca-c392-4132-a23c-c0ddb9d89461 req-e34dc3a1-ee5a-4a64-9f15-50831cc8423d service nova] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Neutron deleted interface e5781b49-4005-4203-8390-dc6af21b6eda; detaching it from the instance and deleting it from the info cache [ 1015.196181] env[62109]: DEBUG nova.network.neutron [req-25f441ca-c392-4132-a23c-c0ddb9d89461 req-e34dc3a1-ee5a-4a64-9f15-50831cc8423d service nova] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.271899] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.815s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.276594] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.998s {{(pid=62109) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.277505] env[62109]: INFO nova.compute.claims [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1015.295897] env[62109]: INFO nova.scheduler.client.report [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Deleted allocations for instance f72ca981-1bba-44d9-854f-7677f1a0c764 [ 1015.300611] env[62109]: DEBUG oslo_concurrency.lockutils [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.419601] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117005, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.624911] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.628394] env[62109]: DEBUG nova.compute.manager [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1015.629365] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d619da-90d9-4d1a-a2cd-dde6f2cf7d1c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.672626] env[62109]: DEBUG nova.network.neutron [-] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.700017] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e7fe350c-2e54-48d4-aa61-054b29f6384b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.709333] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ed57ff-2d96-49d9-bc2c-926586cca2cc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.740348] env[62109]: DEBUG nova.compute.manager [req-25f441ca-c392-4132-a23c-c0ddb9d89461 req-e34dc3a1-ee5a-4a64-9f15-50831cc8423d service nova] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Detach interface failed, port_id=e5781b49-4005-4203-8390-dc6af21b6eda, reason: Instance dfebeee8-06be-424b-89b0-7c1a3d4703eb could not be 
found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1015.810029] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4504e92b-d022-444e-be5c-97859698938e tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "f72ca981-1bba-44d9-854f-7677f1a0c764" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.319s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.919206] env[62109]: DEBUG oslo_vmware.api [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117005, 'name': ReconfigVM_Task, 'duration_secs': 5.832267} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.919468] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.919682] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Reconfigured VM to detach interface {{(pid=62109) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 1016.175963] env[62109]: INFO nova.compute.manager [-] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Took 1.44 seconds to deallocate network for instance. 
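
The "Acquiring lock ... / Lock ... acquired ... waited N.NNNs / ... held N.NNNs" records above are emitted by oslo.concurrency's lockutils wrappers: the decorator form produces the acquired/waited/held timing lines, while the "refresh_cache-<uuid>" Acquiring/Acquired/Releasing lines come from the context-manager form. The following is an illustrative sketch of that pattern, not part of the captured log; the lock names are taken from the log, but the helper function names and bodies are placeholders.

    from oslo_concurrency import lockutils

    # Decorator form: serialises callers on the named lock and emits the
    # "acquired ... waited" / "released ... held" debug lines seen above.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # critical section (resource-tracker bookkeeping would go here)
        return instance_uuid

    # Context-manager form, as used for the per-instance network cache locks.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # rebuild the instance network info cache while holding the lock
            pass
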
[ 1016.489162] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4d0431-02f9-4458-8a5b-ed40bdd7ea49 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.497057] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93aeff6-7e84-4bd8-8199-030b4c22e7a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.527179] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a7acb8-44c7-4e4d-970e-30138a0d6d5f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.534781] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463ed930-ebb0-46f0-a7bd-1a45cdad9025 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.547530] env[62109]: DEBUG nova.compute.provider_tree [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.645699] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39941ca4-a073-475e-8bfc-4b0c32cb4f03 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.653741] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Doing hard reboot of VM {{(pid=62109) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1064}} [ 1016.653984] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-7dc6057e-4f7b-4409-9dd3-14c31de441b6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.660037] env[62109]: DEBUG oslo_vmware.api [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1016.660037] env[62109]: value = "task-1117013" [ 1016.660037] env[62109]: _type = "Task" [ 1016.660037] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.667612] env[62109]: DEBUG oslo_vmware.api [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117013, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.681736] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.817717] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da00dcca-e117-49de-9906-6da37b83be39 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.839321] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating instance '2fddcd6c-241e-4591-acec-12487909355c' progress to 0 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1017.050446] env[62109]: DEBUG nova.scheduler.client.report [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1017.171197] env[62109]: DEBUG oslo_vmware.api [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117013, 'name': ResetVM_Task, 'duration_secs': 0.119503} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.171492] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Did hard reboot of VM {{(pid=62109) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1068}} [ 1017.171676] env[62109]: DEBUG nova.compute.manager [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1017.172455] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e336ef7-9ad2-4e7e-88de-bafbd810a2e0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.256763] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.256923] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.257144] env[62109]: DEBUG nova.network.neutron [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1017.347615] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1017.347972] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07cdf919-9baf-46f2-9c7f-fd0d86763c05 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.356518] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1017.356518] env[62109]: value = "task-1117015" [ 1017.356518] env[62109]: _type = "Task" [ 1017.356518] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.366207] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117015, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.394993] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.395598] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.558819] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.280s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.558819] env[62109]: DEBUG nova.compute.manager [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1017.559734] env[62109]: DEBUG oslo_concurrency.lockutils [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.899s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.560173] env[62109]: DEBUG nova.objects.instance [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lazy-loading 'resources' on Instance uuid 5842e112-d3ef-4ce9-91cc-198e68d12422 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.685174] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e08bcfa1-6a25-4fb0-b915-c9c7eed2be26 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.002s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.865021] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117015, 'name': PowerOffVM_Task, 'duration_secs': 0.181566} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.867213] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1017.867406] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating instance '2fddcd6c-241e-4591-acec-12487909355c' progress to 17 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1017.901757] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.901928] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1018.000732] env[62109]: INFO nova.network.neutron [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Port fdebe937-16f4-47b6-982f-2a88b25aa054 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
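
The PowerOffVM_Task, ResetVM_Task and ReconfigVM_Task records above, together with the "Waiting for the task ... progress is N% ... completed successfully" lines, come from oslo.vmware's task polling loop. Below is a minimal sketch of that pattern; the vCenter address, credentials, retry/poll values and the power_off helper are illustrative placeholders, not values taken from this log.

    from oslo_vmware import api as vmware_api

    # Placeholder connection details; oslo.vmware logs the session setup and
    # every SOAP invocation much like the records above.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    def power_off(vm_ref):
        # Start the vSphere task, then block until vCenter reports completion;
        # wait_for_task() polls TaskInfo and emits the progress/duration lines.
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task_ref)
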
[ 1018.000732] env[62109]: DEBUG nova.network.neutron [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updating instance_info_cache with network_info: [{"id": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "address": "fa:16:3e:1b:aa:bb", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fbaff0-8e", "ovs_interfaceid": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.063968] env[62109]: DEBUG nova.compute.utils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1018.064912] env[62109]: DEBUG nova.compute.manager [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1018.064912] env[62109]: DEBUG nova.network.neutron [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1018.119015] env[62109]: DEBUG nova.policy [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55b37a32a29b4362980594ec488273a8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a9a723ee43464fabb315c16b617beb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1018.233753] env[62109]: DEBUG nova.compute.manager [req-de09a965-9cd8-4464-acd3-fce614b6f4d7 req-e6156089-24e4-4bb2-b72a-c0a4c5521213 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Received event network-changed-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1018.233753] env[62109]: DEBUG nova.compute.manager [req-de09a965-9cd8-4464-acd3-fce614b6f4d7 req-e6156089-24e4-4bb2-b72a-c0a4c5521213 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Refreshing instance network info cache due to event network-changed-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1018.233753] env[62109]: DEBUG oslo_concurrency.lockutils [req-de09a965-9cd8-4464-acd3-fce614b6f4d7 req-e6156089-24e4-4bb2-b72a-c0a4c5521213 service nova] Acquiring lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.308942] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed3cbcb-5045-41a2-b14a-95af2a989c49 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.316808] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfa1305-4200-4704-a013-fbdab6bed9e9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.346753] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0901498b-98fd-4a2a-a626-d046cdc5ddb1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.356814] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ff5ab2-6573-4d80-849b-c211c92d42e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.371361] env[62109]: DEBUG nova.compute.provider_tree [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1018.375291] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1018.375291] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1018.375291] env[62109]: DEBUG nova.virt.hardware [None 
req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1018.375291] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1018.375291] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1018.375479] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1018.375597] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1018.375682] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1018.375862] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1018.376042] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1018.376306] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1018.385295] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a30887d3-5983-43c7-9190-cbe1555a5a8e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.399280] env[62109]: DEBUG nova.network.neutron [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Successfully created port: 1568441e-6c95-41db-b95d-e0029a3b218a {{(pid=62109) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1018.402878] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1018.402878] env[62109]: value = "task-1117016" [ 1018.402878] env[62109]: _type = "Task" [ 1018.402878] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.414316] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117016, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.503611] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.506400] env[62109]: DEBUG oslo_concurrency.lockutils [req-de09a965-9cd8-4464-acd3-fce614b6f4d7 req-e6156089-24e4-4bb2-b72a-c0a4c5521213 service nova] Acquired lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.506772] env[62109]: DEBUG nova.network.neutron [req-de09a965-9cd8-4464-acd3-fce614b6f4d7 req-e6156089-24e4-4bb2-b72a-c0a4c5521213 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Refreshing network info cache for port f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1018.571710] env[62109]: DEBUG nova.compute.manager [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1018.901495] env[62109]: ERROR nova.scheduler.client.report [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [req-aeabdfa7-148c-46e7-ab67-086c3256a12d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-aeabdfa7-148c-46e7-ab67-086c3256a12d"}]} [ 1018.914256] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117016, 'name': ReconfigVM_Task, 'duration_secs': 0.219624} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.914256] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating instance '2fddcd6c-241e-4591-acec-12487909355c' progress to 33 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1018.919523] env[62109]: DEBUG nova.scheduler.client.report [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1018.928467] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "interface-b5410f60-c5fb-4325-8d42-8745c310a6ca-fdebe937-16f4-47b6-982f-2a88b25aa054" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.928806] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-b5410f60-c5fb-4325-8d42-8745c310a6ca-fdebe937-16f4-47b6-982f-2a88b25aa054" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.929319] env[62109]: DEBUG nova.objects.instance [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lazy-loading 'flavor' on Instance uuid b5410f60-c5fb-4325-8d42-8745c310a6ca {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.942430] env[62109]: DEBUG nova.scheduler.client.report [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) 
_refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1018.942430] env[62109]: DEBUG nova.compute.provider_tree [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1018.955580] env[62109]: DEBUG nova.scheduler.client.report [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1018.978558] env[62109]: DEBUG nova.scheduler.client.report [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1019.010131] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b20b6abc-d153-4480-8956-919d20ccd9d6 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-6b5a009e-28f5-4be7-8641-089abe359954-fdebe937-16f4-47b6-982f-2a88b25aa054" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.705s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.243466] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Volume attach. 
Driver type: vmdk {{(pid=62109) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1019.243701] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244519', 'volume_id': 'c1ea88f9-7236-4370-a00a-5b149ceb966b', 'name': 'volume-c1ea88f9-7236-4370-a00a-5b149ceb966b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a197a73e-32bc-45b0-ae6f-5275cf74285b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c1ea88f9-7236-4370-a00a-5b149ceb966b', 'serial': 'c1ea88f9-7236-4370-a00a-5b149ceb966b'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1019.244644] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd6f995-c0eb-488a-9409-78d7d24de443 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.248430] env[62109]: DEBUG nova.network.neutron [req-de09a965-9cd8-4464-acd3-fce614b6f4d7 req-e6156089-24e4-4bb2-b72a-c0a4c5521213 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updated VIF entry in instance network info cache for port f8fbaff0-8e7e-4f1c-9709-51d00228bc0d. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1019.249070] env[62109]: DEBUG nova.network.neutron [req-de09a965-9cd8-4464-acd3-fce614b6f4d7 req-e6156089-24e4-4bb2-b72a-c0a4c5521213 service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updating instance_info_cache with network_info: [{"id": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "address": "fa:16:3e:1b:aa:bb", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fbaff0-8e", "ovs_interfaceid": "f8fbaff0-8e7e-4f1c-9709-51d00228bc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.251986] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b044b0c2-801d-4735-9c5c-32b14f47a417 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.267492] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9163c064-8059-4047-b2a5-f2939bae596e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.273070] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8991bb9-80e6-424b-9701-7bcb93ccf8fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.299706] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] volume-c1ea88f9-7236-4370-a00a-5b149ceb966b/volume-c1ea88f9-7236-4370-a00a-5b149ceb966b.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.301107] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a90a7c3-8d43-438f-93df-b804e2c2f55a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.342284] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d22d0a-d836-4bb1-bd43-428f1f357c9c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.346866] env[62109]: DEBUG oslo_vmware.api [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 1019.346866] env[62109]: value = "task-1117017" [ 1019.346866] env[62109]: _type = "Task" [ 1019.346866] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.352619] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0db1cdb-3e94-4ef5-bf52-480e233df548 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.361405] env[62109]: DEBUG oslo_vmware.api [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117017, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.369175] env[62109]: DEBUG nova.compute.provider_tree [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1019.425906] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1019.425906] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1019.425906] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1019.425906] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1019.426135] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1019.426196] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1019.426523] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1019.426607] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1019.426735] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1019.426902] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1019.427090] env[62109]: DEBUG nova.virt.hardware [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1019.432437] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Reconfiguring VM instance instance-0000005e to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1019.435962] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b033e1d-d6aa-4d52-9c8e-f5e8ff1e957f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.455734] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1019.455734] env[62109]: value = "task-1117018" [ 1019.455734] env[62109]: _type = "Task" [ 1019.455734] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.464790] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117018, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.539817] env[62109]: DEBUG nova.objects.instance [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lazy-loading 'pci_requests' on Instance uuid b5410f60-c5fb-4325-8d42-8745c310a6ca {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.586926] env[62109]: DEBUG nova.compute.manager [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1019.616890] env[62109]: DEBUG nova.virt.hardware [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1019.617231] env[62109]: DEBUG nova.virt.hardware [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1019.617446] env[62109]: DEBUG nova.virt.hardware [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1019.617671] env[62109]: DEBUG nova.virt.hardware [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1019.617845] env[62109]: DEBUG nova.virt.hardware [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1019.617999] env[62109]: DEBUG nova.virt.hardware [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1019.618245] env[62109]: DEBUG nova.virt.hardware [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1019.618421] env[62109]: DEBUG nova.virt.hardware [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1019.618659] env[62109]: DEBUG nova.virt.hardware [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1019.618868] env[62109]: DEBUG nova.virt.hardware [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1019.619068] env[62109]: DEBUG nova.virt.hardware [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1019.619957] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc970ceb-da91-4a2e-a280-fa2d65808a73 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.628206] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d44bdb-e90d-48a4-b92d-465459eedaab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.751490] env[62109]: DEBUG oslo_concurrency.lockutils [req-de09a965-9cd8-4464-acd3-fce614b6f4d7 req-e6156089-24e4-4bb2-b72a-c0a4c5521213 service nova] Releasing lock "refresh_cache-6b5a009e-28f5-4be7-8641-089abe359954" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.859585] env[62109]: DEBUG oslo_vmware.api [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117017, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.903682] env[62109]: DEBUG nova.scheduler.client.report [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 130 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1019.904086] env[62109]: DEBUG nova.compute.provider_tree [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 130 to 131 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1019.906053] env[62109]: DEBUG nova.compute.provider_tree [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1019.967479] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117018, 'name': ReconfigVM_Task, 'duration_secs': 0.293417} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.967780] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Reconfigured VM instance instance-0000005e to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1019.968651] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da618655-733b-4318-8235-929aac3db0bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.992687] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 2fddcd6c-241e-4591-acec-12487909355c/2fddcd6c-241e-4591-acec-12487909355c.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.992996] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16c0a780-eff5-4a39-b5a6-7816c0a0ff3e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.011208] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1020.011208] env[62109]: value = "task-1117019" [ 1020.011208] env[62109]: _type = "Task" [ 1020.011208] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.019016] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117019, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.043575] env[62109]: DEBUG nova.objects.base [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1020.043575] env[62109]: DEBUG nova.network.neutron [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1020.131165] env[62109]: DEBUG nova.compute.manager [req-2e79f525-9b39-44d5-81dd-baa6a7262da2 req-e4e3b482-3b23-4f37-8970-e44ee94d14d9 service nova] [instance: f5b81761-6db9-4260-8876-435bac74b027] Received event network-vif-plugged-1568441e-6c95-41db-b95d-e0029a3b218a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1020.131428] env[62109]: DEBUG oslo_concurrency.lockutils [req-2e79f525-9b39-44d5-81dd-baa6a7262da2 req-e4e3b482-3b23-4f37-8970-e44ee94d14d9 service nova] Acquiring lock "f5b81761-6db9-4260-8876-435bac74b027-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.131633] env[62109]: DEBUG oslo_concurrency.lockutils [req-2e79f525-9b39-44d5-81dd-baa6a7262da2 req-e4e3b482-3b23-4f37-8970-e44ee94d14d9 service nova] Lock "f5b81761-6db9-4260-8876-435bac74b027-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.131806] env[62109]: DEBUG oslo_concurrency.lockutils [req-2e79f525-9b39-44d5-81dd-baa6a7262da2 req-e4e3b482-3b23-4f37-8970-e44ee94d14d9 service nova] Lock "f5b81761-6db9-4260-8876-435bac74b027-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.131977] env[62109]: DEBUG nova.compute.manager [req-2e79f525-9b39-44d5-81dd-baa6a7262da2 req-e4e3b482-3b23-4f37-8970-e44ee94d14d9 service nova] [instance: f5b81761-6db9-4260-8876-435bac74b027] No waiting events found dispatching network-vif-plugged-1568441e-6c95-41db-b95d-e0029a3b218a {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1020.132406] env[62109]: WARNING nova.compute.manager [req-2e79f525-9b39-44d5-81dd-baa6a7262da2 req-e4e3b482-3b23-4f37-8970-e44ee94d14d9 service nova] [instance: f5b81761-6db9-4260-8876-435bac74b027] Received unexpected event network-vif-plugged-1568441e-6c95-41db-b95d-e0029a3b218a for instance with vm_state building and task_state spawning. 
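The network-vif-plugged sequence above reflects Nova's external-event pattern: the compute manager keeps per-instance event waiters behind an "<uuid>-events" lock, and when Neutron reports an event for which nobody has registered yet (as here, while the instance is still building/spawning), it logs the "Received unexpected event" warning instead of dispatching. Below is a minimal, hypothetical sketch of that idea using plain threading primitives; InstanceEventWaiter and its methods are invented for illustration and are not Nova's actual classes.

# Hypothetical, simplified illustration of the "no waiting events found ->
# unexpected event" behaviour seen above; not Nova's real implementation.
import threading

class InstanceEventWaiter:
    def __init__(self):
        self._lock = threading.Lock()      # stands in for the "<uuid>-events" lock
        self._waiters = {}                 # event name -> threading.Event

    def prepare_for(self, event_name):
        # Register interest in an event before it is expected to arrive.
        with self._lock:
            ev = self._waiters.setdefault(event_name, threading.Event())
        return ev

    def dispatch(self, event_name):
        # Called when an external event (e.g. network-vif-plugged-<port>) arrives.
        with self._lock:
            ev = self._waiters.pop(event_name, None)
        if ev is None:
            # Corresponds to the WARNING above: the event beat the waiter.
            print("Received unexpected event %s" % event_name)
        else:
            ev.set()

waiter = InstanceEventWaiter()
# Event arrives before spawn registered a waiter -> "unexpected event" path.
waiter.dispatch("network-vif-plugged-example-port-id")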
[ 1020.137930] env[62109]: DEBUG nova.policy [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491fd4e791924dafb155dd356bf20aa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6ee24c114bd495e8f29eeda1f6b8bba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1020.212190] env[62109]: DEBUG nova.network.neutron [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Successfully updated port: 1568441e-6c95-41db-b95d-e0029a3b218a {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1020.257454] env[62109]: DEBUG nova.compute.manager [req-1c0a9de4-cf5d-4748-8ed8-ff0b98c9c86a req-f24130d8-a0ae-45be-a646-cfd898ba84dc service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Received event network-changed-bef2387d-4fe5-4a29-89fe-d990d0e93b2a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1020.257454] env[62109]: DEBUG nova.compute.manager [req-1c0a9de4-cf5d-4748-8ed8-ff0b98c9c86a req-f24130d8-a0ae-45be-a646-cfd898ba84dc service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Refreshing instance network info cache due to event network-changed-bef2387d-4fe5-4a29-89fe-d990d0e93b2a. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1020.257758] env[62109]: DEBUG oslo_concurrency.lockutils [req-1c0a9de4-cf5d-4748-8ed8-ff0b98c9c86a req-f24130d8-a0ae-45be-a646-cfd898ba84dc service nova] Acquiring lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.257758] env[62109]: DEBUG oslo_concurrency.lockutils [req-1c0a9de4-cf5d-4748-8ed8-ff0b98c9c86a req-f24130d8-a0ae-45be-a646-cfd898ba84dc service nova] Acquired lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.258982] env[62109]: DEBUG nova.network.neutron [req-1c0a9de4-cf5d-4748-8ed8-ff0b98c9c86a req-f24130d8-a0ae-45be-a646-cfd898ba84dc service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Refreshing network info cache for port bef2387d-4fe5-4a29-89fe-d990d0e93b2a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1020.360584] env[62109]: DEBUG oslo_vmware.api [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117017, 'name': ReconfigVM_Task, 'duration_secs': 0.609332} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.361071] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Reconfigured VM instance instance-00000055 to attach disk [datastore2] volume-c1ea88f9-7236-4370-a00a-5b149ceb966b/volume-c1ea88f9-7236-4370-a00a-5b149ceb966b.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.368012] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e417fab5-308c-45f1-9ca0-eced6a7de4ad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.385092] env[62109]: DEBUG oslo_vmware.api [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 1020.385092] env[62109]: value = "task-1117020" [ 1020.385092] env[62109]: _type = "Task" [ 1020.385092] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.392863] env[62109]: DEBUG oslo_vmware.api [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117020, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.411494] env[62109]: DEBUG oslo_concurrency.lockutils [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.850s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.413458] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.627s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.414728] env[62109]: INFO nova.compute.claims [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1020.439059] env[62109]: INFO nova.scheduler.client.report [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleted allocations for instance 5842e112-d3ef-4ce9-91cc-198e68d12422 [ 1020.521457] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117019, 'name': ReconfigVM_Task, 'duration_secs': 0.255376} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.521816] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 2fddcd6c-241e-4591-acec-12487909355c/2fddcd6c-241e-4591-acec-12487909355c.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.522114] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating instance '2fddcd6c-241e-4591-acec-12487909355c' progress to 50 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1020.715386] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Acquiring lock "refresh_cache-f5b81761-6db9-4260-8876-435bac74b027" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.715623] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Acquired lock "refresh_cache-f5b81761-6db9-4260-8876-435bac74b027" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.715755] env[62109]: DEBUG nova.network.neutron [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1020.894521] env[62109]: DEBUG oslo_vmware.api [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117020, 'name': ReconfigVM_Task, 'duration_secs': 0.147739} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.894870] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244519', 'volume_id': 'c1ea88f9-7236-4370-a00a-5b149ceb966b', 'name': 'volume-c1ea88f9-7236-4370-a00a-5b149ceb966b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a197a73e-32bc-45b0-ae6f-5275cf74285b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c1ea88f9-7236-4370-a00a-5b149ceb966b', 'serial': 'c1ea88f9-7236-4370-a00a-5b149ceb966b'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1020.949341] env[62109]: DEBUG oslo_concurrency.lockutils [None req-819e638b-d96a-4b42-b256-5eefee00ba6f tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "5842e112-d3ef-4ce9-91cc-198e68d12422" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.610s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.994260] env[62109]: DEBUG nova.network.neutron [req-1c0a9de4-cf5d-4748-8ed8-ff0b98c9c86a req-f24130d8-a0ae-45be-a646-cfd898ba84dc service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updated VIF entry in instance network info cache for port bef2387d-4fe5-4a29-89fe-d990d0e93b2a. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1020.994665] env[62109]: DEBUG nova.network.neutron [req-1c0a9de4-cf5d-4748-8ed8-ff0b98c9c86a req-f24130d8-a0ae-45be-a646-cfd898ba84dc service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updating instance_info_cache with network_info: [{"id": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "address": "fa:16:3e:4d:c9:f2", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef2387d-4f", "ovs_interfaceid": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.028777] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6e1b4cc0-48e9-4133-9982-f4e9c95d10e3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.048093] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5964023-6df2-4556-9f62-d58f8e3fd2d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.066570] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating instance '2fddcd6c-241e-4591-acec-12487909355c' progress to 67 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1021.249190] env[62109]: DEBUG nova.network.neutron [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1021.262029] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.262168] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.377880] env[62109]: DEBUG nova.network.neutron [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Updating instance_info_cache with network_info: [{"id": "1568441e-6c95-41db-b95d-e0029a3b218a", "address": "fa:16:3e:fe:74:f7", "network": {"id": "53591656-c52b-4447-ba59-0e1f7a706980", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1896491840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a9a723ee43464fabb315c16b617beb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1568441e-6c", "ovs_interfaceid": "1568441e-6c95-41db-b95d-e0029a3b218a", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.437379] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Didn't find any instances for network info cache update. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1021.437379] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.437496] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.437610] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.437761] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.437904] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.438261] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.438261] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1021.438351] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.499333] env[62109]: DEBUG oslo_concurrency.lockutils [req-1c0a9de4-cf5d-4748-8ed8-ff0b98c9c86a req-f24130d8-a0ae-45be-a646-cfd898ba84dc service nova] Releasing lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.628649] env[62109]: DEBUG nova.network.neutron [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Port 879d4990-6187-4722-b8c5-0c20f9fa59cc binding to destination host cpu-1 is already ACTIVE {{(pid=62109) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1021.648160] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae06bac-32cd-4d6d-8279-cc92282d7cae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.659621] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae01d86e-72ef-4bde-ae79-d34793f6a0cc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.690208] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7820246-053c-45f0-a200-faffe8dd37e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.697497] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f44f08-bbc8-4fd1-84f8-bb028ecfd638 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.713530] env[62109]: DEBUG nova.compute.provider_tree [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.713530] env[62109]: DEBUG nova.network.neutron [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Successfully updated port: fdebe937-16f4-47b6-982f-2a88b25aa054 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1021.767630] env[62109]: INFO nova.compute.manager [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Detaching volume c2c1fe43-1dae-4c41-a564-3d09f609743e [ 1021.813396] env[62109]: INFO nova.virt.block_device [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 
39c17e34-c8c0-4a66-8d22-717efcb984bc] Attempting to driver detach volume c2c1fe43-1dae-4c41-a564-3d09f609743e from mountpoint /dev/sdb [ 1021.813396] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Volume detach. Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1021.813396] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244500', 'volume_id': 'c2c1fe43-1dae-4c41-a564-3d09f609743e', 'name': 'volume-c2c1fe43-1dae-4c41-a564-3d09f609743e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '39c17e34-c8c0-4a66-8d22-717efcb984bc', 'attached_at': '', 'detached_at': '', 'volume_id': 'c2c1fe43-1dae-4c41-a564-3d09f609743e', 'serial': 'c2c1fe43-1dae-4c41-a564-3d09f609743e'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1021.814746] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb28f78d-5f57-4b71-bbb9-4bd40e560019 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.841096] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b95d9b-12f3-4142-8f69-b6fe32ef0c0a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.848875] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6067fdce-b793-4ef0-9f24-5e0264e4f4b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.873832] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a79d09-6ff8-475c-a694-13bcbee208ed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.890302] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Releasing lock "refresh_cache-f5b81761-6db9-4260-8876-435bac74b027" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.891031] env[62109]: DEBUG nova.compute.manager [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Instance network_info: |[{"id": "1568441e-6c95-41db-b95d-e0029a3b218a", "address": "fa:16:3e:fe:74:f7", "network": {"id": "53591656-c52b-4447-ba59-0e1f7a706980", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1896491840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a9a723ee43464fabb315c16b617beb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1568441e-6c", "ovs_interfaceid": "1568441e-6c95-41db-b95d-e0029a3b218a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1021.891031] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] The volume has not been displaced from its original location: [datastore1] volume-c2c1fe43-1dae-4c41-a564-3d09f609743e/volume-c2c1fe43-1dae-4c41-a564-3d09f609743e.vmdk. No consolidation needed. {{(pid=62109) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1021.897183] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Reconfiguring VM instance instance-0000004f to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1021.897674] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:74:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd733acc2-07d0-479e-918c-ec8a21925389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1568441e-6c95-41db-b95d-e0029a3b218a', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1021.904675] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Creating folder: Project (e3a9a723ee43464fabb315c16b617beb). Parent ref: group-v244329. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1021.904919] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c1a6448-522b-4169-83a5-5627a2e40f98 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.918161] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b431605b-590e-49c6-b070-cd1a4694f46f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.925828] env[62109]: DEBUG oslo_vmware.api [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 1021.925828] env[62109]: value = "task-1117022" [ 1021.925828] env[62109]: _type = "Task" [ 1021.925828] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.929893] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Created folder: Project (e3a9a723ee43464fabb315c16b617beb) in parent group-v244329. [ 1021.930103] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Creating folder: Instances. Parent ref: group-v244520. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1021.930700] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ffa56aa2-7023-4293-908b-8d42c638f80f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.932805] env[62109]: DEBUG nova.objects.instance [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'flavor' on Instance uuid a197a73e-32bc-45b0-ae6f-5275cf74285b {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.937585] env[62109]: DEBUG oslo_vmware.api [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117022, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.940784] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.944948] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Created folder: Instances in parent group-v244520. 
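The Folder.CreateFolder and ReconfigVM_Task entries above are issued through oslo.vmware, which wraps each SOAP call and then polls the returned Task object; the "progress is N%" and "completed successfully ... duration_secs" lines come from that polling loop. A rough sketch of the calling side follows, assuming an already-established VMwareAPISession and managed-object references obtained elsewhere; the host, credentials and folder name are placeholders, and this is not Nova's exact helper code.

# Rough sketch of driving vCenter operations through oslo.vmware; placeholders only.
from oslo_vmware import api as vmware_api


def create_instances_folder(session, parent_folder_ref):
    # Mirrors "Creating folder: Instances. Parent ref: group-v244520." above.
    # CreateFolder returns the new Folder ref directly (it is not a *_Task method).
    return session.invoke_api(session.vim, 'CreateFolder',
                              parent_folder_ref, name='Instances')


def reconfigure_vm(session, vm_ref, reconfig_spec):
    # *_Task methods return a Task ref; wait_for_task() polls it, which is what
    # produces the "progress is N%" / "completed successfully" debug lines.
    task_ref = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=reconfig_spec)
    return session.wait_for_task(task_ref)


# A session would be built roughly like this (placeholder endpoint/credentials):
# session = vmware_api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
#                                       api_retry_count=10, task_poll_interval=0.5)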
[ 1021.945190] env[62109]: DEBUG oslo.service.loopingcall [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1021.945387] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5b81761-6db9-4260-8876-435bac74b027] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1021.945596] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7cf8bf35-d75d-4ee0-aeca-796b36b3b156 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.965260] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1021.965260] env[62109]: value = "task-1117024" [ 1021.965260] env[62109]: _type = "Task" [ 1021.965260] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.972853] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117024, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.215343] env[62109]: DEBUG nova.scheduler.client.report [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1022.219729] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.219729] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.219729] env[62109]: DEBUG nova.network.neutron [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1022.436234] env[62109]: DEBUG oslo_vmware.api [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117022, 'name': 
ReconfigVM_Task, 'duration_secs': 0.276364} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.436634] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Reconfigured VM instance instance-0000004f to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1022.442974] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b78a6f8-8137-4629-bd75-119d3f5d9b41 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.453332] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d460e493-843f-4b01-9058-ec1e8d52290f tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.838s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.461263] env[62109]: DEBUG oslo_vmware.api [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 1022.461263] env[62109]: value = "task-1117025" [ 1022.461263] env[62109]: _type = "Task" [ 1022.461263] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.467964] env[62109]: DEBUG oslo_vmware.api [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117025, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.476808] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117024, 'name': CreateVM_Task, 'duration_secs': 0.29315} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.477035] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5b81761-6db9-4260-8876-435bac74b027] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1022.477770] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.477939] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.478305] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1022.478643] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6555e1f2-56a2-4a74-902d-6c395b3134ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.483631] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Waiting for the task: (returnval){ [ 1022.483631] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f7843e-dd0c-88fb-3bd3-9f82d3cdb99d" [ 1022.483631] env[62109]: _type = "Task" [ 1022.483631] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.494229] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f7843e-dd0c-88fb-3bd3-9f82d3cdb99d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.656252] env[62109]: DEBUG oslo_concurrency.lockutils [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "2fddcd6c-241e-4591-acec-12487909355c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.656485] env[62109]: DEBUG oslo_concurrency.lockutils [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "2fddcd6c-241e-4591-acec-12487909355c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.656624] env[62109]: DEBUG oslo_concurrency.lockutils [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "2fddcd6c-241e-4591-acec-12487909355c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.687898] env[62109]: DEBUG nova.compute.manager [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] [instance: f5b81761-6db9-4260-8876-435bac74b027] Received event network-changed-1568441e-6c95-41db-b95d-e0029a3b218a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1022.688145] env[62109]: DEBUG nova.compute.manager [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] [instance: f5b81761-6db9-4260-8876-435bac74b027] Refreshing instance network info cache due to event network-changed-1568441e-6c95-41db-b95d-e0029a3b218a. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1022.688369] env[62109]: DEBUG oslo_concurrency.lockutils [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] Acquiring lock "refresh_cache-f5b81761-6db9-4260-8876-435bac74b027" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.688527] env[62109]: DEBUG oslo_concurrency.lockutils [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] Acquired lock "refresh_cache-f5b81761-6db9-4260-8876-435bac74b027" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.688682] env[62109]: DEBUG nova.network.neutron [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] [instance: f5b81761-6db9-4260-8876-435bac74b027] Refreshing network info cache for port 1568441e-6c95-41db-b95d-e0029a3b218a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1022.721685] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.309s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.722164] env[62109]: DEBUG nova.compute.manager [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1022.726515] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.045s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.726719] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.728456] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.788s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.728635] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.728783] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1022.730448] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9eb48f-7954-4de5-b3ee-12f76e84e364 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.739242] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e18ab0-c04c-4f8f-8e32-d748aa4497d1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.753889] env[62109]: INFO nova.scheduler.client.report [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted allocations for instance dfebeee8-06be-424b-89b0-7c1a3d4703eb [ 1022.755476] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d9fe23-690c-408e-94f8-b42d058a181f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.766521] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12098b55-3d3e-4c66-a3c6-fed7790b7b94 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.770212] env[62109]: WARNING nova.network.neutron [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] 
[instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] cfbec6c5-3421-476e-aca8-de96e0de15af already exists in list: networks containing: ['cfbec6c5-3421-476e-aca8-de96e0de15af']. ignoring it [ 1022.802153] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179905MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1022.802153] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.802340] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.969948] env[62109]: DEBUG oslo_vmware.api [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117025, 'name': ReconfigVM_Task, 'duration_secs': 0.145138} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.970325] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244500', 'volume_id': 'c2c1fe43-1dae-4c41-a564-3d09f609743e', 'name': 'volume-c2c1fe43-1dae-4c41-a564-3d09f609743e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '39c17e34-c8c0-4a66-8d22-717efcb984bc', 'attached_at': '', 'detached_at': '', 'volume_id': 'c2c1fe43-1dae-4c41-a564-3d09f609743e', 'serial': 'c2c1fe43-1dae-4c41-a564-3d09f609743e'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1022.994201] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f7843e-dd0c-88fb-3bd3-9f82d3cdb99d, 'name': SearchDatastore_Task, 'duration_secs': 0.009389} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.994531] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.994767] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1022.995016] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.995171] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.995359] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1022.995623] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d2b4715-f240-415f-99a7-08cb25072095 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.006455] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1023.006645] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1023.007369] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23fd5ae6-89b7-4dc4-84d3-80619ca9f0a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.012385] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Waiting for the task: (returnval){ [ 1023.012385] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52448ae1-fb26-cd93-4959-279f3936604b" [ 1023.012385] env[62109]: _type = "Task" [ 1023.012385] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.023839] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52448ae1-fb26-cd93-4959-279f3936604b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.065162] env[62109]: DEBUG nova.network.neutron [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updating instance_info_cache with network_info: [{"id": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "address": "fa:16:3e:4d:c9:f2", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef2387d-4f", "ovs_interfaceid": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fdebe937-16f4-47b6-982f-2a88b25aa054", "address": "fa:16:3e:41:30:37", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdebe937-16", "ovs_interfaceid": "fdebe937-16f4-47b6-982f-2a88b25aa054", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.230954] env[62109]: DEBUG nova.compute.utils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1023.232395] env[62109]: DEBUG nova.compute.manager [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1023.233939] env[62109]: DEBUG nova.network.neutron [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1023.264892] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77887420-c83b-4a4c-8d3a-37ca4ea5ca68 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "dfebeee8-06be-424b-89b0-7c1a3d4703eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.174s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.300118] env[62109]: DEBUG nova.policy [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e608055854844801b9f7c51d07820917', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ca12aa68e4b4d4d8cf1e3332deb44f4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1023.499553] env[62109]: DEBUG nova.network.neutron [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] [instance: f5b81761-6db9-4260-8876-435bac74b027] Updated VIF entry in instance network info cache for port 1568441e-6c95-41db-b95d-e0029a3b218a. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1023.499918] env[62109]: DEBUG nova.network.neutron [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] [instance: f5b81761-6db9-4260-8876-435bac74b027] Updating instance_info_cache with network_info: [{"id": "1568441e-6c95-41db-b95d-e0029a3b218a", "address": "fa:16:3e:fe:74:f7", "network": {"id": "53591656-c52b-4447-ba59-0e1f7a706980", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1896491840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a9a723ee43464fabb315c16b617beb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1568441e-6c", "ovs_interfaceid": "1568441e-6c95-41db-b95d-e0029a3b218a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.519018] env[62109]: DEBUG nova.objects.instance [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lazy-loading 'flavor' on Instance uuid 39c17e34-c8c0-4a66-8d22-717efcb984bc {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1023.524745] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52448ae1-fb26-cd93-4959-279f3936604b, 'name': SearchDatastore_Task, 'duration_secs': 0.012304} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.525648] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20e526f0-dd00-47d0-84d6-9526116c367a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.531910] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Waiting for the task: (returnval){ [ 1023.531910] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c8861a-84f2-22ad-9b97-5100af36ecf9" [ 1023.531910] env[62109]: _type = "Task" [ 1023.531910] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.539651] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c8861a-84f2-22ad-9b97-5100af36ecf9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.568420] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.569098] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.569272] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.570086] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a91474-4000-47ae-94b1-69da96eb277e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.586692] env[62109]: DEBUG nova.virt.hardware [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1023.586932] env[62109]: DEBUG nova.virt.hardware [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1023.587108] env[62109]: DEBUG nova.virt.hardware [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1023.587301] env[62109]: DEBUG nova.virt.hardware [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c 
tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1023.587457] env[62109]: DEBUG nova.virt.hardware [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1023.587607] env[62109]: DEBUG nova.virt.hardware [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1023.587810] env[62109]: DEBUG nova.virt.hardware [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1023.587973] env[62109]: DEBUG nova.virt.hardware [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1023.588161] env[62109]: DEBUG nova.virt.hardware [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1023.588329] env[62109]: DEBUG nova.virt.hardware [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1023.588508] env[62109]: DEBUG nova.virt.hardware [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1023.594876] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Reconfiguring VM to attach interface {{(pid=62109) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 1023.595187] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1b27af4-473d-4b09-be5c-78d317772a05 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.612303] env[62109]: DEBUG oslo_vmware.api [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: 
(returnval){ [ 1023.612303] env[62109]: value = "task-1117026" [ 1023.612303] env[62109]: _type = "Task" [ 1023.612303] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.620395] env[62109]: DEBUG oslo_vmware.api [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117026, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.651993] env[62109]: DEBUG nova.network.neutron [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Successfully created port: b56d9eee-64a4-44a1-8e6c-47e14aea0e2f {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1023.704890] env[62109]: DEBUG oslo_concurrency.lockutils [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.705140] env[62109]: DEBUG oslo_concurrency.lockutils [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.705331] env[62109]: DEBUG nova.network.neutron [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1023.736310] env[62109]: DEBUG nova.compute.manager [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1023.812842] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Applying migration context for instance 2fddcd6c-241e-4591-acec-12487909355c as it has an incoming, in-progress migration 67e3e12d-1802-4d1a-a8d3-cf456396b721. Migration status is post-migrating {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1023.814533] env[62109]: INFO nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating resource usage from migration 67e3e12d-1802-4d1a-a8d3-cf456396b721 [ 1023.835870] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.836034] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.836234] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.836292] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.836385] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 39c17e34-c8c0-4a66-8d22-717efcb984bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.836499] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.836609] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 6b5a009e-28f5-4be7-8641-089abe359954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.836795] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance a197a73e-32bc-45b0-ae6f-5275cf74285b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.836830] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 0f7445fa-c48e-4e79-a01a-1f8f70072de4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.836920] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance b5410f60-c5fb-4325-8d42-8745c310a6ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.837037] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance bc75898d-7856-4ecb-9640-ec30538fe90f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.837150] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Migration 67e3e12d-1802-4d1a-a8d3-cf456396b721 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1023.837255] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 2fddcd6c-241e-4591-acec-12487909355c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.837360] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance f5b81761-6db9-4260-8876-435bac74b027 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.837466] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 12288104-483b-4bb4-9e33-05bf5d7be3a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.906059] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.906300] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.907441] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.907660] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.002626] env[62109]: DEBUG oslo_concurrency.lockutils [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] Releasing lock "refresh_cache-f5b81761-6db9-4260-8876-435bac74b027" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.002880] env[62109]: DEBUG nova.compute.manager [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Received event network-vif-plugged-fdebe937-16f4-47b6-982f-2a88b25aa054 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1024.003126] env[62109]: DEBUG oslo_concurrency.lockutils [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] Acquiring lock "b5410f60-c5fb-4325-8d42-8745c310a6ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.003405] env[62109]: DEBUG oslo_concurrency.lockutils [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] Lock "b5410f60-c5fb-4325-8d42-8745c310a6ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.003622] env[62109]: DEBUG oslo_concurrency.lockutils [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 
req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] Lock "b5410f60-c5fb-4325-8d42-8745c310a6ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.003887] env[62109]: DEBUG nova.compute.manager [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] No waiting events found dispatching network-vif-plugged-fdebe937-16f4-47b6-982f-2a88b25aa054 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1024.004133] env[62109]: WARNING nova.compute.manager [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Received unexpected event network-vif-plugged-fdebe937-16f4-47b6-982f-2a88b25aa054 for instance with vm_state active and task_state None. [ 1024.004501] env[62109]: DEBUG nova.compute.manager [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Received event network-changed-fdebe937-16f4-47b6-982f-2a88b25aa054 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1024.004741] env[62109]: DEBUG nova.compute.manager [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Refreshing instance network info cache due to event network-changed-fdebe937-16f4-47b6-982f-2a88b25aa054. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1024.005012] env[62109]: DEBUG oslo_concurrency.lockutils [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] Acquiring lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.005223] env[62109]: DEBUG oslo_concurrency.lockutils [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] Acquired lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.005454] env[62109]: DEBUG nova.network.neutron [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Refreshing network info cache for port fdebe937-16f4-47b6-982f-2a88b25aa054 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1024.042647] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52c8861a-84f2-22ad-9b97-5100af36ecf9, 'name': SearchDatastore_Task, 'duration_secs': 0.009575} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.042941] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.043237] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] f5b81761-6db9-4260-8876-435bac74b027/f5b81761-6db9-4260-8876-435bac74b027.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1024.043502] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20bae2b5-04b5-4456-8a31-a9c292de5b33 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.050405] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Waiting for the task: (returnval){ [ 1024.050405] env[62109]: value = "task-1117027" [ 1024.050405] env[62109]: _type = "Task" [ 1024.050405] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.058489] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117027, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.122294] env[62109]: DEBUG oslo_vmware.api [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117026, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.343795] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1024.344085] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1024.344293] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3456MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1024.412676] env[62109]: DEBUG nova.compute.manager [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1024.415752] env[62109]: INFO nova.compute.manager [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Detaching volume 0ab70d08-11b8-4801-8d13-3c142199f3d4 [ 1024.430443] env[62109]: DEBUG nova.network.neutron [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating instance_info_cache with network_info: [{"id": "879d4990-6187-4722-b8c5-0c20f9fa59cc", "address": "fa:16:3e:60:5c:8d", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap879d4990-61", "ovs_interfaceid": "879d4990-6187-4722-b8c5-0c20f9fa59cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.460511] env[62109]: INFO nova.virt.block_device [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Attempting to driver detach volume 0ab70d08-11b8-4801-8d13-3c142199f3d4 from mountpoint /dev/sdb [ 1024.460776] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] 
[instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Volume detach. Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1024.460969] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244518', 'volume_id': '0ab70d08-11b8-4801-8d13-3c142199f3d4', 'name': 'volume-0ab70d08-11b8-4801-8d13-3c142199f3d4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a197a73e-32bc-45b0-ae6f-5275cf74285b', 'attached_at': '', 'detached_at': '', 'volume_id': '0ab70d08-11b8-4801-8d13-3c142199f3d4', 'serial': '0ab70d08-11b8-4801-8d13-3c142199f3d4'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1024.461981] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec16da3c-bff5-4d61-8157-ea5adda7a985 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.496098] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9a1ebc-bf22-483a-9951-34783bf73485 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.504036] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fca0f6-a2b9-47d5-b996-df3835fe7fb9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.535422] env[62109]: DEBUG oslo_concurrency.lockutils [None req-42bddcfd-6e77-4ab1-a861-9ebe7ad42515 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.273s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.537757] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3482151f-6a30-4878-9e12-32ce7e9b4d61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.553014] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] The volume has not been displaced from its original location: [datastore2] volume-0ab70d08-11b8-4801-8d13-3c142199f3d4/volume-0ab70d08-11b8-4801-8d13-3c142199f3d4.vmdk. No consolidation needed. 
{{(pid=62109) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1024.558517] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Reconfiguring VM instance instance-00000055 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1024.564698] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d42995d-ac68-4d25-9769-027cf68246c5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.586152] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117027, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512933} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.587396] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] f5b81761-6db9-4260-8876-435bac74b027/f5b81761-6db9-4260-8876-435bac74b027.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1024.587624] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1024.587939] env[62109]: DEBUG oslo_vmware.api [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 1024.587939] env[62109]: value = "task-1117028" [ 1024.587939] env[62109]: _type = "Task" [ 1024.587939] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.590365] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb11d276-16ee-4c29-b584-1b1582c8815f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.605014] env[62109]: DEBUG oslo_vmware.api [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117028, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.605419] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Waiting for the task: (returnval){ [ 1024.605419] env[62109]: value = "task-1117029" [ 1024.605419] env[62109]: _type = "Task" [ 1024.605419] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.617022] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117029, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.629194] env[62109]: DEBUG oslo_vmware.api [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117026, 'name': ReconfigVM_Task, 'duration_secs': 0.529413} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.630245] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.630245] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Reconfigured VM to attach interface {{(pid=62109) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 1024.659497] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75852aa8-efff-409c-b665-48a476aac6bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.670706] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e8ce08-409b-4d11-ac09-dd6806768bde {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.707321] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a773c37-f51c-451a-897e-5cc23c0142d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.715553] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8073c6-deb0-4a75-aec1-481ce3215ea4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.729276] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1024.747149] env[62109]: DEBUG nova.compute.manager [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1024.771666] env[62109]: DEBUG nova.virt.hardware [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1024.771924] env[62109]: DEBUG nova.virt.hardware [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1024.772101] env[62109]: DEBUG nova.virt.hardware [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1024.772295] env[62109]: DEBUG nova.virt.hardware [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1024.772446] env[62109]: DEBUG nova.virt.hardware [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1024.772629] env[62109]: DEBUG nova.virt.hardware [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1024.772860] env[62109]: DEBUG nova.virt.hardware [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 
tempest-ServerDiskConfigTestJSON-842321983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1024.773037] env[62109]: DEBUG nova.virt.hardware [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1024.773243] env[62109]: DEBUG nova.virt.hardware [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1024.773431] env[62109]: DEBUG nova.virt.hardware [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1024.773633] env[62109]: DEBUG nova.virt.hardware [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1024.774765] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d07c7c-bbda-40e8-a44e-34373f2db2cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.778322] env[62109]: DEBUG nova.network.neutron [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updated VIF entry in instance network info cache for port fdebe937-16f4-47b6-982f-2a88b25aa054. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1024.778725] env[62109]: DEBUG nova.network.neutron [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updating instance_info_cache with network_info: [{"id": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "address": "fa:16:3e:4d:c9:f2", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef2387d-4f", "ovs_interfaceid": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fdebe937-16f4-47b6-982f-2a88b25aa054", "address": "fa:16:3e:41:30:37", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdebe937-16", "ovs_interfaceid": "fdebe937-16f4-47b6-982f-2a88b25aa054", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.785128] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c52c4d-879b-4664-b828-1648934dac14 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.932959] env[62109]: DEBUG oslo_concurrency.lockutils [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.939182] env[62109]: DEBUG 
oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.102641] env[62109]: DEBUG oslo_vmware.api [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117028, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.117055] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117029, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071798} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.117346] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1025.118196] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad87e2a-d43c-425e-9475-57e7fe0f8afc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.135015] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99f3a6f1-c10d-4dcf-8846-5e440612346c tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-b5410f60-c5fb-4325-8d42-8745c310a6ca-fdebe937-16f4-47b6-982f-2a88b25aa054" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.206s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.144989] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] f5b81761-6db9-4260-8876-435bac74b027/f5b81761-6db9-4260-8876-435bac74b027.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1025.145699] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6742b845-45d4-47ff-b52d-bc94a846dc4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.167653] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Waiting for the task: (returnval){ [ 1025.167653] env[62109]: value = "task-1117030" [ 1025.167653] env[62109]: _type = "Task" [ 1025.167653] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.172813] env[62109]: DEBUG nova.compute.manager [req-d5b9aacb-20ba-4be6-b667-e1e78b8b4381 req-cab1e35c-4ddf-49e4-94ac-b79090e0ab71 service nova] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Received event network-vif-plugged-b56d9eee-64a4-44a1-8e6c-47e14aea0e2f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1025.173044] env[62109]: DEBUG oslo_concurrency.lockutils [req-d5b9aacb-20ba-4be6-b667-e1e78b8b4381 req-cab1e35c-4ddf-49e4-94ac-b79090e0ab71 service nova] Acquiring lock "12288104-483b-4bb4-9e33-05bf5d7be3a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.173292] env[62109]: DEBUG oslo_concurrency.lockutils [req-d5b9aacb-20ba-4be6-b667-e1e78b8b4381 req-cab1e35c-4ddf-49e4-94ac-b79090e0ab71 service nova] Lock "12288104-483b-4bb4-9e33-05bf5d7be3a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.173485] env[62109]: DEBUG oslo_concurrency.lockutils [req-d5b9aacb-20ba-4be6-b667-e1e78b8b4381 req-cab1e35c-4ddf-49e4-94ac-b79090e0ab71 service nova] Lock "12288104-483b-4bb4-9e33-05bf5d7be3a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.173652] env[62109]: DEBUG nova.compute.manager [req-d5b9aacb-20ba-4be6-b667-e1e78b8b4381 req-cab1e35c-4ddf-49e4-94ac-b79090e0ab71 service nova] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] No waiting events found dispatching network-vif-plugged-b56d9eee-64a4-44a1-8e6c-47e14aea0e2f {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1025.173817] env[62109]: WARNING nova.compute.manager [req-d5b9aacb-20ba-4be6-b667-e1e78b8b4381 req-cab1e35c-4ddf-49e4-94ac-b79090e0ab71 service nova] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Received unexpected event network-vif-plugged-b56d9eee-64a4-44a1-8e6c-47e14aea0e2f for instance with vm_state building and task_state spawning. [ 1025.179480] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117030, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.250397] env[62109]: ERROR nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [req-7dee1fa8-439f-43b0-8b0a-62c64692b18a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 574e9717-c25e-453d-8028-45d9e2f95398. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7dee1fa8-439f-43b0-8b0a-62c64692b18a"}]} [ 1025.266823] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1025.279876] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1025.280115] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1025.282544] env[62109]: DEBUG oslo_concurrency.lockutils [req-79357fb6-db6a-4846-a3d6-6084187c6ff2 req-6c9a22d1-1444-4d9f-8927-01bf984c514e service nova] Releasing lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.293968] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1025.312796] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1025.456481] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97257dff-943e-436a-9bec-8ce21b9607c4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.481037] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac0e88b-18fd-470f-b087-6a22169ef480 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.490671] 
env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating instance '2fddcd6c-241e-4591-acec-12487909355c' progress to 83 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1025.521066] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e20b615-dd46-487b-9fee-1860b9a97a54 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.529611] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76186167-7a6b-4389-90a4-f25a9dc240d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.563843] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba633ec7-8d52-4d8b-b282-34e07eca45ad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.571893] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03383b64-9dce-469a-bab0-80cce1e30dc1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.585884] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1025.601643] env[62109]: DEBUG oslo_vmware.api [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117028, 'name': ReconfigVM_Task, 'duration_secs': 0.563743} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.601911] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Reconfigured VM instance instance-00000055 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1025.606632] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52676343-f326-4212-87f7-03602414f29f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.616914] env[62109]: DEBUG nova.network.neutron [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Successfully updated port: b56d9eee-64a4-44a1-8e6c-47e14aea0e2f {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1025.624074] env[62109]: DEBUG oslo_vmware.api [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 1025.624074] env[62109]: value = "task-1117031" [ 1025.624074] env[62109]: _type = "Task" [ 1025.624074] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.633316] env[62109]: DEBUG oslo_vmware.api [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117031, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.677885] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117030, 'name': ReconfigVM_Task, 'duration_secs': 0.272404} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.678213] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Reconfigured VM instance instance-0000005f to attach disk [datastore2] f5b81761-6db9-4260-8876-435bac74b027/f5b81761-6db9-4260-8876-435bac74b027.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1025.678954] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5061f744-8387-4365-87ab-071a16c508d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.686310] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Waiting for the task: (returnval){ [ 1025.686310] env[62109]: value = "task-1117032" [ 1025.686310] env[62109]: _type = "Task" [ 1025.686310] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.695230] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117032, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.763177] env[62109]: DEBUG nova.compute.manager [req-c821c37e-3fc1-4f95-9b96-3881ab110387 req-c63c82bd-3a68-43c5-a5be-a96c8dccfb6e service nova] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Received event network-changed-b56d9eee-64a4-44a1-8e6c-47e14aea0e2f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1025.763379] env[62109]: DEBUG nova.compute.manager [req-c821c37e-3fc1-4f95-9b96-3881ab110387 req-c63c82bd-3a68-43c5-a5be-a96c8dccfb6e service nova] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Refreshing instance network info cache due to event network-changed-b56d9eee-64a4-44a1-8e6c-47e14aea0e2f. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1025.763575] env[62109]: DEBUG oslo_concurrency.lockutils [req-c821c37e-3fc1-4f95-9b96-3881ab110387 req-c63c82bd-3a68-43c5-a5be-a96c8dccfb6e service nova] Acquiring lock "refresh_cache-12288104-483b-4bb4-9e33-05bf5d7be3a8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.763725] env[62109]: DEBUG oslo_concurrency.lockutils [req-c821c37e-3fc1-4f95-9b96-3881ab110387 req-c63c82bd-3a68-43c5-a5be-a96c8dccfb6e service nova] Acquired lock "refresh_cache-12288104-483b-4bb4-9e33-05bf5d7be3a8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.763891] env[62109]: DEBUG nova.network.neutron [req-c821c37e-3fc1-4f95-9b96-3881ab110387 req-c63c82bd-3a68-43c5-a5be-a96c8dccfb6e service nova] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Refreshing network info cache for port b56d9eee-64a4-44a1-8e6c-47e14aea0e2f {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1025.995203] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.995510] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.995725] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "39c17e34-c8c0-4a66-8d22-717efcb984bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.996427] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.996635] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.999785] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1026.000322] env[62109]: INFO nova.compute.manager [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Terminating instance [ 1026.001685] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d317396b-e165-4471-ba8c-6a5a1634bb56 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.003771] env[62109]: DEBUG nova.compute.manager [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1026.003965] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1026.004786] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d1d8cc-f8e0-49a3-a273-874ca4345d1f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.012915] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1026.014095] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7b9ee35-17bc-4ea8-a5a8-9f127d76a2f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.015682] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1026.015682] env[62109]: value = "task-1117033" [ 1026.015682] env[62109]: _type = "Task" [ 1026.015682] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.020911] env[62109]: DEBUG oslo_vmware.api [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 1026.020911] env[62109]: value = "task-1117034" [ 1026.020911] env[62109]: _type = "Task" [ 1026.020911] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.023826] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117033, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.032244] env[62109]: DEBUG oslo_vmware.api [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117034, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.119914] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "refresh_cache-12288104-483b-4bb4-9e33-05bf5d7be3a8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.129521] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 132 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1026.129754] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 132 to 133 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1026.129917] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1026.138872] env[62109]: DEBUG oslo_vmware.api [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117031, 'name': ReconfigVM_Task, 'duration_secs': 0.155496} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.139195] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244518', 'volume_id': '0ab70d08-11b8-4801-8d13-3c142199f3d4', 'name': 'volume-0ab70d08-11b8-4801-8d13-3c142199f3d4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a197a73e-32bc-45b0-ae6f-5275cf74285b', 'attached_at': '', 'detached_at': '', 'volume_id': '0ab70d08-11b8-4801-8d13-3c142199f3d4', 'serial': '0ab70d08-11b8-4801-8d13-3c142199f3d4'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1026.198505] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117032, 'name': Rename_Task, 'duration_secs': 0.165814} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.198913] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1026.199209] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ad13f61-f426-4251-9cb1-e42d607dc73e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.207540] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Waiting for the task: (returnval){ [ 1026.207540] env[62109]: value = "task-1117035" [ 1026.207540] env[62109]: _type = "Task" [ 1026.207540] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.215061] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117035, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.318651] env[62109]: DEBUG nova.network.neutron [req-c821c37e-3fc1-4f95-9b96-3881ab110387 req-c63c82bd-3a68-43c5-a5be-a96c8dccfb6e service nova] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1026.404557] env[62109]: DEBUG nova.network.neutron [req-c821c37e-3fc1-4f95-9b96-3881ab110387 req-c63c82bd-3a68-43c5-a5be-a96c8dccfb6e service nova] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.526550] env[62109]: DEBUG oslo_vmware.api [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117033, 'name': PowerOnVM_Task, 'duration_secs': 0.406607} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.529657] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1026.529825] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-206569e2-4140-4d7d-9c62-2623d3322297 tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating instance '2fddcd6c-241e-4591-acec-12487909355c' progress to 100 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1026.541057] env[62109]: DEBUG oslo_vmware.api [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117034, 'name': PowerOffVM_Task, 'duration_secs': 0.24048} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.541057] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1026.541057] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1026.541057] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99e1714f-91fe-4cda-bb88-790d7ce69e9e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.611935] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1026.612140] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1026.612339] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Deleting the datastore file [datastore1] 39c17e34-c8c0-4a66-8d22-717efcb984bc {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.613110] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e81c378a-f83d-4912-94e5-1d8f6f769308 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.620265] env[62109]: DEBUG oslo_vmware.api [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 1026.620265] env[62109]: value = "task-1117037" [ 1026.620265] env[62109]: _type = "Task" [ 1026.620265] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.628113] env[62109]: DEBUG oslo_vmware.api [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117037, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.634863] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1026.635055] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.833s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.635299] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.696s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.637013] env[62109]: INFO nova.compute.claims [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1026.688499] env[62109]: DEBUG nova.objects.instance [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'flavor' on Instance uuid a197a73e-32bc-45b0-ae6f-5275cf74285b {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.716814] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117035, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.756671] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "interface-b5410f60-c5fb-4325-8d42-8745c310a6ca-fdebe937-16f4-47b6-982f-2a88b25aa054" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.757018] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-b5410f60-c5fb-4325-8d42-8745c310a6ca-fdebe937-16f4-47b6-982f-2a88b25aa054" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.907537] env[62109]: DEBUG oslo_concurrency.lockutils [req-c821c37e-3fc1-4f95-9b96-3881ab110387 req-c63c82bd-3a68-43c5-a5be-a96c8dccfb6e service nova] Releasing lock "refresh_cache-12288104-483b-4bb4-9e33-05bf5d7be3a8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.907991] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "refresh_cache-12288104-483b-4bb4-9e33-05bf5d7be3a8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.908236] env[62109]: DEBUG nova.network.neutron [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1027.130141] env[62109]: DEBUG oslo_vmware.api [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117037, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.265362} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.130429] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.130783] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1027.130783] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1027.130954] env[62109]: INFO nova.compute.manager [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1027.131219] env[62109]: DEBUG oslo.service.loopingcall [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1027.131438] env[62109]: DEBUG nova.compute.manager [-] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1027.131536] env[62109]: DEBUG nova.network.neutron [-] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1027.218766] env[62109]: DEBUG oslo_vmware.api [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117035, 'name': PowerOnVM_Task, 'duration_secs': 0.52642} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.218766] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1027.218766] env[62109]: INFO nova.compute.manager [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Took 7.63 seconds to spawn the instance on the hypervisor. 
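The PowerOnVM_Task sequence that completes just above (task-1117035 for instance f5b81761-6db9-4260-8876-435bac74b027) follows oslo.vmware's invoke-then-wait pattern: the driver issues the vSphere call, receives a Task managed object back, and polls it until it finishes. A minimal sketch of that pattern, assuming an already-created oslo_vmware.api.VMwareAPISession ("session") and an already-resolved VM managed-object reference ("vm_ref"); neither is constructed here and the helper name is hypothetical:

    # Sketch only: mirrors the PowerOnVM_Task / wait_for_task sequence in the
    # log entries above. "session" is assumed to be an existing
    # oslo_vmware.api.VMwareAPISession and "vm_ref" an already-resolved VM
    # managed-object reference; neither is built in this sketch.
    def power_on_and_wait(session, vm_ref):
        # invoke_api returns a Task managed-object reference, which is what the
        # '(returnval){ value = "task-...", _type = "Task" }' blocks above show.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task until it succeeds or raises, producing
        # the repeated '_poll_task ... progress is N%' lines seen in this log.
        return session.wait_for_task(task)

The same invoke-and-poll shape accounts for the ReconfigVM_Task, Rename_Task, PowerOffVM_Task and DeleteDatastoreFile_Task entries elsewhere in this section.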
[ 1027.218766] env[62109]: DEBUG nova.compute.manager [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1027.219479] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755e8a3f-3a16-4130-a23a-439a4265c121 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.261125] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.261125] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.261758] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6041f992-e34b-4049-90c3-642c1a4272f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.281968] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196d6c47-ce0f-4467-adbe-496553396275 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.311745] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Reconfiguring VM to detach interface {{(pid=62109) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 1027.311745] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba461390-f962-409f-a0ea-27e153fd3771 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.332137] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 1027.332137] env[62109]: value = "task-1117038" [ 1027.332137] env[62109]: _type = "Task" [ 1027.332137] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.340379] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117038, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.443092] env[62109]: DEBUG nova.network.neutron [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1027.679334] env[62109]: DEBUG nova.network.neutron [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Updating instance_info_cache with network_info: [{"id": "b56d9eee-64a4-44a1-8e6c-47e14aea0e2f", "address": "fa:16:3e:8d:52:92", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb56d9eee-64", "ovs_interfaceid": "b56d9eee-64a4-44a1-8e6c-47e14aea0e2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.700084] env[62109]: DEBUG oslo_concurrency.lockutils [None req-af3b2f81-6ca6-4b06-8f47-b14859e7cbc5 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.790s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.739492] env[62109]: INFO nova.compute.manager [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Took 14.48 seconds to build instance. 
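The instance_info_cache blobs logged in this section (for example the two-VIF cache for b5410f60-c5fb-4325-8d42-8745c310a6ca earlier and the single-VIF cache for 12288104-483b-4bb4-9e33-05bf5d7be3a8 just above) are lists of per-port dictionaries. A hypothetical standalone helper for reading them while debugging, assuming the bracketed list has been isolated from the log line and parsed into plain Python dicts (the logged form is JSON-compatible); this is not Nova's NetworkInfo API:

    # Sketch only: summarizes a network_info-shaped list like the ones logged
    # above. Assumes the blob was extracted from the log line and parsed with
    # json.loads(); it does not use Nova's NetworkInfo model.
    def summarize_network_info(network_info):
        rows = []
        for vif in network_info:
            fixed, floating = [], []
            for subnet in vif.get('network', {}).get('subnets', []):
                for ip in subnet.get('ips', []):
                    fixed.append(ip.get('address'))
                    floating.extend(f.get('address')
                                    for f in ip.get('floating_ips', []))
            rows.append({'port_id': vif.get('id'),
                         'devname': vif.get('devname'),
                         'fixed_ips': fixed,
                         'floating_ips': floating})
        return rows

Applied to the cache update logged earlier for port fdebe937-16f4-47b6-982f-2a88b25aa054, this would report devname tapfdebe937-16 with fixed IP 192.168.128.13 and no floating IPs.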
[ 1027.842698] env[62109]: DEBUG nova.compute.manager [req-5d4f33ca-92c3-4724-abc1-dc2bc70cf434 req-9490ceb5-4d29-448c-9950-4c404c973328 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Received event network-vif-deleted-63549817-3bd1-441c-af9c-739682b35cf2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1027.842946] env[62109]: INFO nova.compute.manager [req-5d4f33ca-92c3-4724-abc1-dc2bc70cf434 req-9490ceb5-4d29-448c-9950-4c404c973328 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Neutron deleted interface 63549817-3bd1-441c-af9c-739682b35cf2; detaching it from the instance and deleting it from the info cache [ 1027.843080] env[62109]: DEBUG nova.network.neutron [req-5d4f33ca-92c3-4724-abc1-dc2bc70cf434 req-9490ceb5-4d29-448c-9950-4c404c973328 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.847660] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.877474] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf95d85-ab06-4288-a245-e1ebc5f13c26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.885257] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601d1710-d1a8-42a7-a70c-1c26d11fca0c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.921659] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628c919a-cb47-4056-8a20-76e30d5d0998 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.932984] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e72db1-f160-4847-8409-75647268c354 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.954515] env[62109]: DEBUG nova.compute.provider_tree [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.182217] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "refresh_cache-12288104-483b-4bb4-9e33-05bf5d7be3a8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.182560] env[62109]: DEBUG nova.compute.manager [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Instance network_info: |[{"id": 
"b56d9eee-64a4-44a1-8e6c-47e14aea0e2f", "address": "fa:16:3e:8d:52:92", "network": {"id": "d626de43-699a-4286-a6b1-1e8dfbb47337", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1624240210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca12aa68e4b4d4d8cf1e3332deb44f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6e940e5-e083-4238-973e-f1b4e2a3a5c7", "external-id": "nsx-vlan-transportzone-64", "segmentation_id": 64, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb56d9eee-64", "ovs_interfaceid": "b56d9eee-64a4-44a1-8e6c-47e14aea0e2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1028.182995] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:52:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6e940e5-e083-4238-973e-f1b4e2a3a5c7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b56d9eee-64a4-44a1-8e6c-47e14aea0e2f', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1028.193696] env[62109]: DEBUG oslo.service.loopingcall [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.193918] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Acquiring lock "f5b81761-6db9-4260-8876-435bac74b027" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.194144] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1028.194378] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de81543e-b70a-4c2e-a596-12783b72ad23 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.210611] env[62109]: DEBUG nova.network.neutron [-] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.219479] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1028.219479] env[62109]: value = "task-1117039" [ 1028.219479] env[62109]: _type = "Task" [ 1028.219479] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.230469] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117039, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.240423] env[62109]: DEBUG oslo_concurrency.lockutils [None req-567885cb-b2a3-43dc-8a98-80e38d122910 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Lock "f5b81761-6db9-4260-8876-435bac74b027" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.991s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.240950] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Lock "f5b81761-6db9-4260-8876-435bac74b027" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.047s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.241196] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Acquiring lock "f5b81761-6db9-4260-8876-435bac74b027-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.242152] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Lock "f5b81761-6db9-4260-8876-435bac74b027-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.242944] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Lock "f5b81761-6db9-4260-8876-435bac74b027-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.245178] env[62109]: INFO nova.compute.manager [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Terminating instance [ 1028.247062] env[62109]: DEBUG nova.compute.manager [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1028.247285] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1028.248489] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdc4e84-5cba-4e1b-a72b-38d1a3330c2d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.255836] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1028.256125] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b08f29b-0e24-4a8b-9c2d-834010adf2b0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.264782] env[62109]: DEBUG oslo_vmware.api [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Waiting for the task: (returnval){ [ 1028.264782] env[62109]: value = "task-1117040" [ 1028.264782] env[62109]: _type = "Task" [ 1028.264782] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.273590] env[62109]: DEBUG oslo_vmware.api [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117040, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.306964] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.306964] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.345339] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.349093] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad70e20b-d8da-47e1-8df2-b16768043214 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.357786] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4325ed-4c3e-4516-8fa5-90dc79a2680f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.393610] env[62109]: DEBUG nova.compute.manager [req-5d4f33ca-92c3-4724-abc1-dc2bc70cf434 req-9490ceb5-4d29-448c-9950-4c404c973328 service nova] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Detach interface failed, port_id=63549817-3bd1-441c-af9c-739682b35cf2, reason: Instance 39c17e34-c8c0-4a66-8d22-717efcb984bc could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1028.458240] env[62109]: DEBUG nova.scheduler.client.report [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1028.714089] env[62109]: INFO nova.compute.manager [-] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Took 1.58 seconds to deallocate network for instance. [ 1028.730671] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117039, 'name': CreateVM_Task, 'duration_secs': 0.38356} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.730900] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1028.731720] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.731920] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.732293] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1028.732583] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9136bea0-3a39-4955-a03b-7a4489b4c4d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.737563] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1028.737563] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529cfab1-da93-c7f3-8550-906e75f1a781" [ 1028.737563] env[62109]: _type = "Task" [ 1028.737563] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.748622] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529cfab1-da93-c7f3-8550-906e75f1a781, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.779612] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "2fddcd6c-241e-4591-acec-12487909355c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.779944] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "2fddcd6c-241e-4591-acec-12487909355c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.779944] env[62109]: DEBUG nova.compute.manager [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Going to confirm migration 5 {{(pid=62109) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1028.781983] env[62109]: DEBUG oslo_vmware.api [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117040, 'name': PowerOffVM_Task, 'duration_secs': 0.208898} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.782474] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1028.782652] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1028.782923] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e45dcc0-53d9-4bf2-9f5d-4652158d2515 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.810325] env[62109]: INFO nova.compute.manager [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Detaching volume c1ea88f9-7236-4370-a00a-5b149ceb966b [ 1028.843654] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117038, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.848208] env[62109]: INFO nova.virt.block_device [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Attempting to driver detach volume c1ea88f9-7236-4370-a00a-5b149ceb966b from mountpoint /dev/sdc [ 1028.848474] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Volume detach. Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1028.848673] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244519', 'volume_id': 'c1ea88f9-7236-4370-a00a-5b149ceb966b', 'name': 'volume-c1ea88f9-7236-4370-a00a-5b149ceb966b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a197a73e-32bc-45b0-ae6f-5275cf74285b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c1ea88f9-7236-4370-a00a-5b149ceb966b', 'serial': 'c1ea88f9-7236-4370-a00a-5b149ceb966b'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1028.849505] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e4b629-d45b-4949-88e1-63fd1dbf7d7d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.872324] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10728d5-51d3-4021-8198-082bb0f948e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.875025] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1028.875237] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1028.875418] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Deleting the datastore file [datastore2] f5b81761-6db9-4260-8876-435bac74b027 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1028.875661] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d138d37b-ef1f-4904-bfd3-aed075ccc369 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.881799] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74f4106-b838-4594-8e5f-6f682a537a0a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.885213] env[62109]: DEBUG oslo_vmware.api [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Waiting for the task: (returnval){ [ 1028.885213] env[62109]: value = "task-1117042" [ 1028.885213] env[62109]: _type = "Task" [ 1028.885213] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.906206] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1750d8af-6164-4875-a8c2-f0d6964e4b62 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.911394] env[62109]: DEBUG oslo_vmware.api [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117042, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.923801] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] The volume has not been displaced from its original location: [datastore2] volume-c1ea88f9-7236-4370-a00a-5b149ceb966b/volume-c1ea88f9-7236-4370-a00a-5b149ceb966b.vmdk. No consolidation needed. {{(pid=62109) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1028.930566] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Reconfiguring VM instance instance-00000055 to detach disk 2002 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1028.930916] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de87f389-c364-4863-8e94-666962f88f0d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.949723] env[62109]: DEBUG oslo_vmware.api [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 1028.949723] env[62109]: value = "task-1117043" [ 1028.949723] env[62109]: _type = "Task" [ 1028.949723] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.958855] env[62109]: DEBUG oslo_vmware.api [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117043, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.963772] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.964330] env[62109]: DEBUG nova.compute.manager [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1029.221523] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.221839] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.222142] env[62109]: DEBUG nova.objects.instance [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lazy-loading 'resources' on Instance uuid 39c17e34-c8c0-4a66-8d22-717efcb984bc {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1029.248051] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529cfab1-da93-c7f3-8550-906e75f1a781, 'name': SearchDatastore_Task, 'duration_secs': 0.010039} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.248414] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.248688] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1029.249014] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.249102] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.249289] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1029.249545] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29c8c4b5-fbee-4813-8831-368293d4cbad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.262977] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1029.262977] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1029.263700] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35a64c66-5ab2-4277-8eb0-9ffff173eb2a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.269153] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1029.269153] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5254f444-7b20-883e-3be9-6faa7e59a4a5" [ 1029.269153] env[62109]: _type = "Task" [ 1029.269153] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.276896] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5254f444-7b20-883e-3be9-6faa7e59a4a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.344427] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.351869] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.352113] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquired lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.352297] env[62109]: DEBUG nova.network.neutron [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1029.352491] env[62109]: DEBUG nova.objects.instance [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lazy-loading 'info_cache' on Instance uuid 2fddcd6c-241e-4591-acec-12487909355c {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1029.396305] env[62109]: DEBUG oslo_vmware.api [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Task: {'id': task-1117042, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253345} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.396579] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1029.396810] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1029.397014] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1029.397203] env[62109]: INFO nova.compute.manager [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] [instance: f5b81761-6db9-4260-8876-435bac74b027] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1029.397448] env[62109]: DEBUG oslo.service.loopingcall [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1029.397910] env[62109]: DEBUG nova.compute.manager [-] [instance: f5b81761-6db9-4260-8876-435bac74b027] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1029.398015] env[62109]: DEBUG nova.network.neutron [-] [instance: f5b81761-6db9-4260-8876-435bac74b027] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1029.461768] env[62109]: DEBUG oslo_vmware.api [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117043, 'name': ReconfigVM_Task, 'duration_secs': 0.241821} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.462070] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Reconfigured VM instance instance-00000055 to detach disk 2002 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1029.466632] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3661a5b-03a7-445e-b946-9e6a90e420ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.477123] env[62109]: DEBUG nova.compute.utils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1029.478468] env[62109]: DEBUG nova.compute.manager [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1029.478641] env[62109]: DEBUG nova.network.neutron [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1029.487274] env[62109]: DEBUG oslo_vmware.api [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 1029.487274] env[62109]: value = "task-1117044" [ 1029.487274] env[62109]: _type = "Task" [ 1029.487274] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.496218] env[62109]: DEBUG oslo_vmware.api [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117044, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.538304] env[62109]: DEBUG nova.policy [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '67a1245dbb50458ebda1a0a350def68b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ac84df552ee74053a00b8204aa781f3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1029.782861] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5254f444-7b20-883e-3be9-6faa7e59a4a5, 'name': SearchDatastore_Task, 'duration_secs': 0.035424} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.783671] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40c86b0c-55e5-43f9-a22b-84577ac2d5ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.789319] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1029.789319] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52bac8ee-c6ec-0d52-b56b-4c7320710241" [ 1029.789319] env[62109]: _type = "Task" [ 1029.789319] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.797988] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52bac8ee-c6ec-0d52-b56b-4c7320710241, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.845094] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117038, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.873881] env[62109]: DEBUG nova.compute.manager [req-06be0b27-b12c-456d-94d7-3d015cc449a5 req-a1d83b92-a4c9-4b99-a0ee-4b9da81f25e1 service nova] [instance: f5b81761-6db9-4260-8876-435bac74b027] Received event network-vif-deleted-1568441e-6c95-41db-b95d-e0029a3b218a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1029.874105] env[62109]: INFO nova.compute.manager [req-06be0b27-b12c-456d-94d7-3d015cc449a5 req-a1d83b92-a4c9-4b99-a0ee-4b9da81f25e1 service nova] [instance: f5b81761-6db9-4260-8876-435bac74b027] Neutron deleted interface 1568441e-6c95-41db-b95d-e0029a3b218a; detaching it from the instance and deleting it from the info cache [ 1029.874287] env[62109]: DEBUG nova.network.neutron [req-06be0b27-b12c-456d-94d7-3d015cc449a5 req-a1d83b92-a4c9-4b99-a0ee-4b9da81f25e1 service nova] [instance: f5b81761-6db9-4260-8876-435bac74b027] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.894164] env[62109]: DEBUG nova.network.neutron [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Successfully created port: 6228f2fe-46b3-4b2e-ac77-0abf0078c9ea {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1029.978316] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef22612-bc13-43c9-a1d4-6e4f83232d2a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.981369] env[62109]: DEBUG nova.compute.manager [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1029.992198] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732b971b-568d-4185-9880-b1e0906f3e4c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.000773] env[62109]: DEBUG oslo_vmware.api [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117044, 'name': ReconfigVM_Task, 'duration_secs': 0.216092} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.027721] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244519', 'volume_id': 'c1ea88f9-7236-4370-a00a-5b149ceb966b', 'name': 'volume-c1ea88f9-7236-4370-a00a-5b149ceb966b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a197a73e-32bc-45b0-ae6f-5275cf74285b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c1ea88f9-7236-4370-a00a-5b149ceb966b', 'serial': 'c1ea88f9-7236-4370-a00a-5b149ceb966b'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1030.031946] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b62c91-d21c-46cd-be60-a1735360ef8e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.040204] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4983049a-55bf-4ecd-b361-9b675e3b59c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.055929] env[62109]: DEBUG nova.compute.provider_tree [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.247217] env[62109]: DEBUG nova.network.neutron [-] [instance: f5b81761-6db9-4260-8876-435bac74b027] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.300161] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52bac8ee-c6ec-0d52-b56b-4c7320710241, 'name': SearchDatastore_Task, 'duration_secs': 0.01532} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.300610] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.300910] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 12288104-483b-4bb4-9e33-05bf5d7be3a8/12288104-483b-4bb4-9e33-05bf5d7be3a8.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1030.301230] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2afde31f-1596-42fc-8fca-020a3c0f6b84 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.307953] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1030.307953] env[62109]: value = "task-1117045" [ 1030.307953] env[62109]: _type = "Task" [ 1030.307953] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.316131] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117045, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.345310] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.377414] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-53d4a5aa-625c-4a3b-aed3-d03c25a7ee00 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.386335] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8dda1f-ef24-4dca-b4a5-c6178dc0f0e7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.420071] env[62109]: DEBUG nova.compute.manager [req-06be0b27-b12c-456d-94d7-3d015cc449a5 req-a1d83b92-a4c9-4b99-a0ee-4b9da81f25e1 service nova] [instance: f5b81761-6db9-4260-8876-435bac74b027] Detach interface failed, port_id=1568441e-6c95-41db-b95d-e0029a3b218a, reason: Instance f5b81761-6db9-4260-8876-435bac74b027 could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1030.558224] env[62109]: DEBUG nova.scheduler.client.report [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1030.583200] env[62109]: DEBUG nova.objects.instance [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'flavor' on Instance uuid a197a73e-32bc-45b0-ae6f-5275cf74285b {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.597240] env[62109]: DEBUG nova.network.neutron [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating instance_info_cache with network_info: [{"id": "879d4990-6187-4722-b8c5-0c20f9fa59cc", "address": "fa:16:3e:60:5c:8d", "network": {"id": "75d10e6f-9d20-4575-868a-cf365c5d8cff", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1535693756-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f94fd7a82dc0489597534c518365971b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap879d4990-61", "ovs_interfaceid": "879d4990-6187-4722-b8c5-0c20f9fa59cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.750498] env[62109]: INFO nova.compute.manager [-] [instance: f5b81761-6db9-4260-8876-435bac74b027] Took 1.35 seconds to deallocate network for instance. [ 1030.820182] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117045, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.849214] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.993413] env[62109]: DEBUG nova.compute.manager [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1031.019597] env[62109]: DEBUG nova.virt.hardware [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1031.019938] env[62109]: DEBUG nova.virt.hardware [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1031.020120] env[62109]: DEBUG nova.virt.hardware [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1031.020217] env[62109]: DEBUG nova.virt.hardware [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1031.020466] env[62109]: DEBUG nova.virt.hardware [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1031.020628] env[62109]: DEBUG nova.virt.hardware [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1031.020846] env[62109]: DEBUG nova.virt.hardware [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1031.021021] env[62109]: DEBUG nova.virt.hardware [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1031.021205] env[62109]: DEBUG nova.virt.hardware [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1031.021504] env[62109]: DEBUG nova.virt.hardware [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1031.021692] env[62109]: DEBUG nova.virt.hardware [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1031.022693] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-452809d1-b510-41df-8cd9-dd700723a0e5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.034957] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-777c1a8f-7afe-47ea-b136-c1677f99c383 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.065620] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.843s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.099160] env[62109]: INFO nova.scheduler.client.report [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Deleted allocations for instance 39c17e34-c8c0-4a66-8d22-717efcb984bc [ 1031.100987] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Releasing lock "refresh_cache-2fddcd6c-241e-4591-acec-12487909355c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.101280] env[62109]: DEBUG nova.objects.instance [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a 
tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lazy-loading 'migration_context' on Instance uuid 2fddcd6c-241e-4591-acec-12487909355c {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.258890] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.259247] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.259520] env[62109]: DEBUG nova.objects.instance [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Lazy-loading 'resources' on Instance uuid f5b81761-6db9-4260-8876-435bac74b027 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.260726] env[62109]: DEBUG oslo_concurrency.lockutils [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.261014] env[62109]: DEBUG oslo_concurrency.lockutils [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.261202] env[62109]: INFO nova.compute.manager [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Shelving [ 1031.320471] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117045, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.827349} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.320845] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 12288104-483b-4bb4-9e33-05bf5d7be3a8/12288104-483b-4bb4-9e33-05bf5d7be3a8.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1031.321453] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1031.321732] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86e4e4d3-5a6e-4ce1-af91-9dcd2a1a48b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.330536] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1031.330536] env[62109]: value = "task-1117046" [ 1031.330536] env[62109]: _type = "Task" [ 1031.330536] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.340290] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117046, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.348210] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117038, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.395561] env[62109]: DEBUG nova.network.neutron [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Successfully updated port: 6228f2fe-46b3-4b2e-ac77-0abf0078c9ea {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1031.591079] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2b11106-4fa2-4b67-b6ca-c28e4546638d tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.284s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.606190] env[62109]: DEBUG nova.objects.base [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Object Instance<2fddcd6c-241e-4591-acec-12487909355c> lazy-loaded attributes: info_cache,migration_context {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1031.606910] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50cb14cb-c02d-44ac-8ac0-382120f958d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.612752] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a5ef4e5-8c2f-4fdb-a16c-78199c9689e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "39c17e34-c8c0-4a66-8d22-717efcb984bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.617s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.630797] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50124eee-28c7-4bcb-9e07-1238312fae13 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.636652] env[62109]: DEBUG oslo_vmware.api [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1031.636652] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5247f6be-7bb0-465d-a218-6b76ec308fa9" [ 1031.636652] env[62109]: _type = "Task" [ 1031.636652] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.644523] env[62109]: DEBUG oslo_vmware.api [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5247f6be-7bb0-465d-a218-6b76ec308fa9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.771968] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1031.772326] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f496647-9a16-4cc5-9417-a490669ee559 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.779701] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1031.779701] env[62109]: value = "task-1117048" [ 1031.779701] env[62109]: _type = "Task" [ 1031.779701] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.788633] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117048, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.841055] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117046, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074964} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.844524] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1031.847927] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b5f128-883c-4f69-93d3-f3f52f4a7a19 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.856570] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117038, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.876505] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 12288104-483b-4bb4-9e33-05bf5d7be3a8/12288104-483b-4bb4-9e33-05bf5d7be3a8.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1031.879525] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-575ad941-0b04-4c00-a293-3dd1d6718d8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.898748] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.899091] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.899503] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "a197a73e-32bc-45b0-ae6f-5275cf74285b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.899758] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.899983] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.902656] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "refresh_cache-e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.902797] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "refresh_cache-e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.903029] env[62109]: DEBUG nova.network.neutron [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1031.906552] env[62109]: INFO nova.compute.manager [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Terminating instance [ 1031.909363] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1031.909363] env[62109]: value = "task-1117049" [ 1031.909363] env[62109]: _type = "Task" [ 1031.909363] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.910473] env[62109]: DEBUG nova.compute.manager [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Received event network-vif-plugged-6228f2fe-46b3-4b2e-ac77-0abf0078c9ea {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1031.910670] env[62109]: DEBUG oslo_concurrency.lockutils [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service nova] Acquiring lock "e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.910869] env[62109]: DEBUG oslo_concurrency.lockutils [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service nova] Lock "e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.911048] env[62109]: DEBUG oslo_concurrency.lockutils [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service nova] Lock "e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.911222] env[62109]: DEBUG nova.compute.manager [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] No waiting events found dispatching network-vif-plugged-6228f2fe-46b3-4b2e-ac77-0abf0078c9ea {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1031.911420] 
env[62109]: WARNING nova.compute.manager [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Received unexpected event network-vif-plugged-6228f2fe-46b3-4b2e-ac77-0abf0078c9ea for instance with vm_state building and task_state spawning. [ 1031.911587] env[62109]: DEBUG nova.compute.manager [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Received event network-changed-6228f2fe-46b3-4b2e-ac77-0abf0078c9ea {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1031.911742] env[62109]: DEBUG nova.compute.manager [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Refreshing instance network info cache due to event network-changed-6228f2fe-46b3-4b2e-ac77-0abf0078c9ea. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1031.911921] env[62109]: DEBUG oslo_concurrency.lockutils [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service nova] Acquiring lock "refresh_cache-e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.916931] env[62109]: DEBUG nova.compute.manager [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1031.917178] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1031.918641] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336a3fd1-1ebd-438e-b68d-83c69b021113 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.933251] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117049, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.935688] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1031.938324] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95f7db2e-f4b0-499c-b746-d76a07655d13 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.946780] env[62109]: DEBUG oslo_vmware.api [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 1031.946780] env[62109]: value = "task-1117050" [ 1031.946780] env[62109]: _type = "Task" [ 1031.946780] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.957432] env[62109]: DEBUG oslo_vmware.api [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117050, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.958354] env[62109]: DEBUG nova.network.neutron [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1032.049017] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4801d1b-fd6a-4fed-994a-887e7b486114 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.061420] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1c6e57-3b1a-4bf8-9119-c36e815cc1c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.098056] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a400303d-e9a6-4a09-a78a-08f404c4feeb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.106039] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba37a4f-91bc-43ab-afb6-21ac4a6765f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.120295] env[62109]: DEBUG nova.compute.provider_tree [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.138517] env[62109]: DEBUG nova.network.neutron [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Updating instance_info_cache with network_info: [{"id": "6228f2fe-46b3-4b2e-ac77-0abf0078c9ea", "address": "fa:16:3e:03:d5:d5", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228f2fe-46", "ovs_interfaceid": "6228f2fe-46b3-4b2e-ac77-0abf0078c9ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.153099] env[62109]: DEBUG oslo_vmware.api [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5247f6be-7bb0-465d-a218-6b76ec308fa9, 'name': SearchDatastore_Task, 'duration_secs': 0.007118} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.153709] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.289655] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117048, 'name': PowerOffVM_Task, 'duration_secs': 0.304024} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.290530] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1032.291142] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4129a187-a627-4a8a-8ac1-70d5d48d0680 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.309626] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a670fb8-31ca-4367-b83e-1db41e7c5b4b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.350645] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.427773] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117049, 'name': ReconfigVM_Task, 'duration_secs': 0.309006} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.428070] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 12288104-483b-4bb4-9e33-05bf5d7be3a8/12288104-483b-4bb4-9e33-05bf5d7be3a8.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1032.428726] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd57aed1-49b6-46c8-aa3d-3d91edf5efd0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.436232] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1032.436232] env[62109]: value = "task-1117051" [ 1032.436232] env[62109]: _type = "Task" [ 1032.436232] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.446252] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117051, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.454452] env[62109]: DEBUG oslo_vmware.api [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117050, 'name': PowerOffVM_Task, 'duration_secs': 0.232691} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.454690] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1032.454864] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1032.455127] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b1b3ada-968d-4399-baf0-e881b1841050 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.519640] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1032.519966] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1032.520753] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Deleting the datastore file [datastore1] a197a73e-32bc-45b0-ae6f-5275cf74285b {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1032.520753] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a97655ad-ffe9-4245-a287-43c14d0d1076 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.527936] env[62109]: DEBUG oslo_vmware.api [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for the task: (returnval){ [ 1032.527936] env[62109]: value = "task-1117053" [ 1032.527936] env[62109]: _type = "Task" [ 1032.527936] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.536099] env[62109]: DEBUG oslo_vmware.api [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117053, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.623872] env[62109]: DEBUG nova.scheduler.client.report [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1032.645049] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "refresh_cache-e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.645332] env[62109]: DEBUG nova.compute.manager [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Instance network_info: |[{"id": "6228f2fe-46b3-4b2e-ac77-0abf0078c9ea", "address": "fa:16:3e:03:d5:d5", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228f2fe-46", "ovs_interfaceid": "6228f2fe-46b3-4b2e-ac77-0abf0078c9ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1032.645626] env[62109]: DEBUG oslo_concurrency.lockutils [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service nova] Acquired lock "refresh_cache-e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.645812] env[62109]: DEBUG nova.network.neutron [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Refreshing network info cache for port 6228f2fe-46b3-4b2e-ac77-0abf0078c9ea {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1032.646878] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None 
req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:d5:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6228f2fe-46b3-4b2e-ac77-0abf0078c9ea', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.654728] env[62109]: DEBUG oslo.service.loopingcall [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1032.655563] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1032.655791] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f7b219d-d298-4932-866d-4428a1b9d9d9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.675613] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.675613] env[62109]: value = "task-1117054" [ 1032.675613] env[62109]: _type = "Task" [ 1032.675613] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.683438] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117054, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.820282] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1032.820599] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b2ac2df0-34c0-4981-bedc-0887e8569bb1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.828271] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1032.828271] env[62109]: value = "task-1117055" [ 1032.828271] env[62109]: _type = "Task" [ 1032.828271] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.837351] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117055, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.849882] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117038, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.946889] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117051, 'name': Rename_Task, 'duration_secs': 0.14767} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.947336] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1032.947618] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51e7bd5e-1227-4012-9b2b-7f7e9a28d07a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.954750] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1032.954750] env[62109]: value = "task-1117056" [ 1032.954750] env[62109]: _type = "Task" [ 1032.954750] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.962897] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117056, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.038087] env[62109]: DEBUG oslo_vmware.api [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Task: {'id': task-1117053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.382916} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.038461] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1033.038715] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1033.038957] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1033.039209] env[62109]: INFO nova.compute.manager [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1033.039517] env[62109]: DEBUG oslo.service.loopingcall [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1033.039779] env[62109]: DEBUG nova.compute.manager [-] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1033.039917] env[62109]: DEBUG nova.network.neutron [-] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1033.129216] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.870s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.131913] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.978s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.150944] env[62109]: INFO nova.scheduler.client.report [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Deleted allocations for instance f5b81761-6db9-4260-8876-435bac74b027 [ 1033.186695] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': 
task-1117054, 'name': CreateVM_Task, 'duration_secs': 0.380316} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.186695] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1033.187312] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.187491] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.187835] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1033.188491] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9cfea81-e9ea-4f25-8b8c-7ad57ae03769 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.192896] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1033.192896] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a3b30c-f80c-5e50-6359-3c0b8d02c6b3" [ 1033.192896] env[62109]: _type = "Task" [ 1033.192896] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.203993] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a3b30c-f80c-5e50-6359-3c0b8d02c6b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.344443] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117055, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.357858] env[62109]: DEBUG oslo_vmware.api [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117038, 'name': ReconfigVM_Task, 'duration_secs': 5.796287} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.357858] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.357858] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Reconfigured VM to detach interface {{(pid=62109) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 1033.469412] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117056, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.514893] env[62109]: DEBUG nova.network.neutron [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Updated VIF entry in instance network info cache for port 6228f2fe-46b3-4b2e-ac77-0abf0078c9ea. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1033.515504] env[62109]: DEBUG nova.network.neutron [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Updating instance_info_cache with network_info: [{"id": "6228f2fe-46b3-4b2e-ac77-0abf0078c9ea", "address": "fa:16:3e:03:d5:d5", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228f2fe-46", "ovs_interfaceid": "6228f2fe-46b3-4b2e-ac77-0abf0078c9ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.659154] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3b43a031-d52c-40b9-a929-f835fdbdf464 tempest-InstanceActionsV221TestJSON-1724895162 tempest-InstanceActionsV221TestJSON-1724895162-project-member] Lock "f5b81761-6db9-4260-8876-435bac74b027" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.418s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.711019] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a3b30c-f80c-5e50-6359-3c0b8d02c6b3, 'name': SearchDatastore_Task, 'duration_secs': 0.009107} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.711019] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.711019] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1033.711019] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.711019] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.711019] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1033.711019] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-324e476b-352a-4b7e-9ce1-4ca507f68534 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.721019] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1033.721019] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1033.721019] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80f0acb6-00ec-41b2-8f9f-3cc9e287e8ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.727417] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1033.727417] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524887cc-9556-4d54-42b5-285ba8e5beb0" [ 1033.727417] env[62109]: _type = "Task" [ 1033.727417] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.739557] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524887cc-9556-4d54-42b5-285ba8e5beb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.841578] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117055, 'name': CreateSnapshot_Task, 'duration_secs': 0.685238} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.841958] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1033.842811] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0f656d-9304-444b-8b0f-0f147260acec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.880885] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f165c03-0ef4-4b3e-bd23-b255af197140 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.889012] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54eff32a-77f8-496f-9a44-ad8182f79650 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.922542] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f68ce0-e059-4cad-8658-13fa061fa7e9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.930868] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43fd15dc-7a9c-45d1-8927-2cac602f4d04 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.945329] env[62109]: DEBUG nova.compute.provider_tree 
[None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1033.964344] env[62109]: DEBUG oslo_vmware.api [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117056, 'name': PowerOnVM_Task, 'duration_secs': 0.556401} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.964678] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1033.964899] env[62109]: INFO nova.compute.manager [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Took 9.22 seconds to spawn the instance on the hypervisor. [ 1033.965095] env[62109]: DEBUG nova.compute.manager [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1033.965834] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7718231-bb2a-4e13-9aca-f7d80c967897 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.010658] env[62109]: DEBUG nova.compute.manager [req-2ff18990-061c-4026-97e8-f7cb0efbf024 req-23d56eff-0f90-421a-8600-a26e3a379aea service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Received event network-vif-deleted-98e332f4-3bb6-4be2-b072-a11329289f58 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1034.010867] env[62109]: INFO nova.compute.manager [req-2ff18990-061c-4026-97e8-f7cb0efbf024 req-23d56eff-0f90-421a-8600-a26e3a379aea service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Neutron deleted interface 98e332f4-3bb6-4be2-b072-a11329289f58; detaching it from the instance and deleting it from the info cache [ 1034.011061] env[62109]: DEBUG nova.network.neutron [req-2ff18990-061c-4026-97e8-f7cb0efbf024 req-23d56eff-0f90-421a-8600-a26e3a379aea service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.018341] env[62109]: DEBUG oslo_concurrency.lockutils [req-5971ec77-a3ad-4c76-91b6-386361cda0f9 req-e8a460b7-9254-4b70-aad2-02a5bcf51667 service 
nova] Releasing lock "refresh_cache-e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.241284] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524887cc-9556-4d54-42b5-285ba8e5beb0, 'name': SearchDatastore_Task, 'duration_secs': 0.01606} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.242113] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a8413d2-45fe-44a5-b7bc-aa10490c201e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.247777] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1034.247777] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5221ee12-c1e7-deef-3cc4-07f448bfc37a" [ 1034.247777] env[62109]: _type = "Task" [ 1034.247777] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.256053] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5221ee12-c1e7-deef-3cc4-07f448bfc37a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.267546] env[62109]: DEBUG nova.network.neutron [-] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.364776] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1034.365129] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-71695559-7586-4070-9ee2-189efd17f471 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.375527] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1034.375527] env[62109]: value = "task-1117057" [ 1034.375527] env[62109]: _type = "Task" [ 1034.375527] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.386018] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117057, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.482614] env[62109]: DEBUG nova.scheduler.client.report [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Updated inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 with generation 133 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1034.482998] env[62109]: DEBUG nova.compute.provider_tree [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Updating resource provider 574e9717-c25e-453d-8028-45d9e2f95398 generation from 133 to 134 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1034.483092] env[62109]: DEBUG nova.compute.provider_tree [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1034.488378] env[62109]: INFO nova.compute.manager [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Took 19.73 seconds to build instance. [ 1034.516785] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c34a1fb6-63c5-43ff-89bd-9bdb6b56ff40 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.528349] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db997c6c-787b-4fb0-b6bf-125381bcc8f8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.567758] env[62109]: DEBUG nova.compute.manager [req-2ff18990-061c-4026-97e8-f7cb0efbf024 req-23d56eff-0f90-421a-8600-a26e3a379aea service nova] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Detach interface failed, port_id=98e332f4-3bb6-4be2-b072-a11329289f58, reason: Instance a197a73e-32bc-45b0-ae6f-5275cf74285b could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1034.758358] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5221ee12-c1e7-deef-3cc4-07f448bfc37a, 'name': SearchDatastore_Task, 'duration_secs': 0.010374} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.758664] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.758965] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7/e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1034.759294] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f23c6b7-1b00-469e-af0b-f00236c86dd9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.765856] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1034.765856] env[62109]: value = "task-1117058" [ 1034.765856] env[62109]: _type = "Task" [ 1034.765856] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.770671] env[62109]: INFO nova.compute.manager [-] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Took 1.73 seconds to deallocate network for instance. [ 1034.775483] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117058, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.783419] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.783600] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquired lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.783787] env[62109]: DEBUG nova.network.neutron [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1034.885707] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117057, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.994897] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9ccd3716-e779-4100-b7ff-2c5f873bbc94 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "12288104-483b-4bb4-9e33-05bf5d7be3a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.248s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.277339] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117058, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478605} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.277613] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7/e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1035.277837] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1035.278113] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87644784-525c-4c5b-984c-e88d62818544 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.282739] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.288090] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1035.288090] env[62109]: value = "task-1117059" [ 1035.288090] env[62109]: _type = "Task" [ 1035.288090] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.298850] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117059, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.386087] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117057, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.496290] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.364s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.499971] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "b5410f60-c5fb-4325-8d42-8745c310a6ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.500159] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "b5410f60-c5fb-4325-8d42-8745c310a6ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.500410] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "b5410f60-c5fb-4325-8d42-8745c310a6ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.500631] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "b5410f60-c5fb-4325-8d42-8745c310a6ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.500820] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "b5410f60-c5fb-4325-8d42-8745c310a6ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.502695] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.222s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.502916] env[62109]: DEBUG nova.objects.instance [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lazy-loading 'resources' on Instance uuid 
a197a73e-32bc-45b0-ae6f-5275cf74285b {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.503921] env[62109]: INFO nova.compute.manager [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Terminating instance [ 1035.506741] env[62109]: DEBUG nova.compute.manager [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1035.506741] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1035.507830] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-823d41cd-08e3-4753-8fd0-b92e7d46a825 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.517396] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1035.517998] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7449369-6cb2-413a-aa90-c39d86271097 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.526586] env[62109]: DEBUG oslo_vmware.api [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 1035.526586] env[62109]: value = "task-1117060" [ 1035.526586] env[62109]: _type = "Task" [ 1035.526586] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.544101] env[62109]: DEBUG oslo_vmware.api [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.721288] env[62109]: INFO nova.network.neutron [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Port fdebe937-16f4-47b6-982f-2a88b25aa054 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
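[editor's note] The records around this point are dominated by two client-side patterns rather than anything instance-specific: oslo.concurrency named locks (the "Acquiring lock ... by ..." / "acquired" / "released" lines) serializing work on the image cache and on "compute_resources", and oslo.vmware task polling (the "Waiting for the task ... to complete" / "Task: {...} progress is N%" lines) around vCenter *_Task calls. A minimal sketch of both patterns follows, assuming an already-created oslo.vmware session object; the names _fetch_image_once, copy_image_vmdk, disk_mgr and the paths are illustrative assumptions, not taken from this log or from the Nova source.

    from oslo_concurrency import lockutils

    # Named internal lock: lockutils.synchronized() (and the lockutils.lock()
    # context manager) emit the "Acquiring lock ..." / "Lock ... acquired by ..."
    # / "released" DEBUG lines seen throughout this log.
    @lockutils.synchronized("[datastore2] devstack-image-cache_base/<image-id>.vmdk")
    def _fetch_image_once():
        # Only one worker at a time may populate this image-cache entry.
        ...

    # vCenter task polling: invoke_api() issues the *_Task call against the VIM
    # service, and wait_for_task() blocks, producing the "Task: {...} progress is
    # N%" / "completed successfully" lines until the task reaches a terminal state.
    def copy_image_vmdk(session, disk_mgr, src_path, dst_path, datacenter):
        # disk_mgr is assumed to be the VirtualDiskManager managed object ref.
        task = session.invoke_api(
            session.vim, "CopyVirtualDisk_Task", disk_mgr,
            sourceName=src_path, sourceDatacenter=datacenter,
            destName=dst_path, destDatacenter=datacenter)
        return session.wait_for_task(task)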
[ 1035.721701] env[62109]: DEBUG nova.network.neutron [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updating instance_info_cache with network_info: [{"id": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "address": "fa:16:3e:4d:c9:f2", "network": {"id": "cfbec6c5-3421-476e-aca8-de96e0de15af", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1421655393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ee24c114bd495e8f29eeda1f6b8bba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef2387d-4f", "ovs_interfaceid": "bef2387d-4fe5-4a29-89fe-d990d0e93b2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.798368] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117059, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063827} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.798677] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1035.799515] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee119af0-d492-4ea1-baf9-b84cddf83d28 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.823711] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7/e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1035.824072] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a35ec709-7075-4572-862a-0e2a1a9eca6a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.845869] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1035.845869] env[62109]: value = "task-1117061" [ 1035.845869] env[62109]: _type = "Task" [ 1035.845869] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.854698] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117061, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.885768] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117057, 'name': CloneVM_Task} progress is 95%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.038537] env[62109]: DEBUG oslo_vmware.api [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117060, 'name': PowerOffVM_Task, 'duration_secs': 0.237918} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.038949] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1036.039018] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1036.039257] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43b8e8aa-5579-4e6a-99e1-bbfd69fffb6c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.067994] env[62109]: INFO nova.scheduler.client.report [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleted allocation for migration 67e3e12d-1802-4d1a-a8d3-cf456396b721 [ 1036.123893] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1036.124174] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1036.124367] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Deleting the datastore file [datastore2] b5410f60-c5fb-4325-8d42-8745c310a6ca {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1036.128496] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8bc2937a-e9fa-49b4-b49f-b6fd3fbd1ca8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.139320] env[62109]: DEBUG oslo_vmware.api [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 1036.139320] env[62109]: value = "task-1117063" [ 1036.139320] env[62109]: _type = "Task" [ 1036.139320] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.153018] env[62109]: DEBUG oslo_vmware.api [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117063, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.222157] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e119f7-fae1-45cd-8117-eabd71283fec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.225650] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Releasing lock "refresh_cache-b5410f60-c5fb-4325-8d42-8745c310a6ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.236532] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c209b8d3-a9d8-440b-9cb9-5e99655e0cbf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.268413] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de77d2ec-0efd-4db2-970c-9072b3971eb6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.276355] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e580df3-3c01-4f82-82f3-efc6afd269bc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.290298] env[62109]: DEBUG nova.compute.provider_tree [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.357700] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117061, 'name': ReconfigVM_Task, 'duration_secs': 0.372478} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.357997] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Reconfigured VM instance instance-00000061 to attach disk [datastore2] e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7/e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1036.358685] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d5e837cf-f7c3-4dc8-bd76-a18304b6010b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.364986] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1036.364986] env[62109]: value = "task-1117064" [ 1036.364986] env[62109]: _type = "Task" [ 1036.364986] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.372466] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117064, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.383886] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117057, 'name': CloneVM_Task} progress is 95%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.579374] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "2fddcd6c-241e-4591-acec-12487909355c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.799s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.637365] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.637365] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.637365] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "58365fb6-a38e-4afa-be36-3cdcdbdbc2b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.637365] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "58365fb6-a38e-4afa-be36-3cdcdbdbc2b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.637365] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "58365fb6-a38e-4afa-be36-3cdcdbdbc2b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.638592] env[62109]: INFO nova.compute.manager [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Terminating instance [ 1036.643840] env[62109]: DEBUG nova.compute.manager [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1036.644071] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1036.644820] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d81d3a-a194-4186-b891-d53ef92cf2b0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.652765] env[62109]: DEBUG oslo_vmware.api [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117063, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169123} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.654862] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1036.655108] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1036.655336] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1036.655549] env[62109]: INFO nova.compute.manager [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1036.655822] env[62109]: DEBUG oslo.service.loopingcall [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1036.656107] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1036.656362] env[62109]: DEBUG nova.compute.manager [-] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1036.656487] env[62109]: DEBUG nova.network.neutron [-] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1036.658060] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6264db48-2736-4e43-8a4a-a519283cd894 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.664987] env[62109]: DEBUG oslo_vmware.api [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 1036.664987] env[62109]: value = "task-1117065" [ 1036.664987] env[62109]: _type = "Task" [ 1036.664987] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.676155] env[62109]: DEBUG oslo_vmware.api [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117065, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.700975] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "12288104-483b-4bb4-9e33-05bf5d7be3a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.701266] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "12288104-483b-4bb4-9e33-05bf5d7be3a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.701470] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "12288104-483b-4bb4-9e33-05bf5d7be3a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.701683] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "12288104-483b-4bb4-9e33-05bf5d7be3a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.701882] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "12288104-483b-4bb4-9e33-05bf5d7be3a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.704435] env[62109]: INFO nova.compute.manager [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Terminating instance [ 1036.707234] env[62109]: DEBUG nova.compute.manager [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1036.707596] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1036.708474] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29baf6b3-a05f-44a1-8d2f-e94f98b236db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.718337] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1036.718617] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-342a0319-657e-41e0-af7c-6196e4f0a275 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.725199] env[62109]: DEBUG oslo_vmware.api [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1036.725199] env[62109]: value = "task-1117066" [ 1036.725199] env[62109]: _type = "Task" [ 1036.725199] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.729546] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fee7ce22-7680-4fe6-9c72-06843c699122 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "interface-b5410f60-c5fb-4325-8d42-8745c310a6ca-fdebe937-16f4-47b6-982f-2a88b25aa054" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.973s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.739407] env[62109]: DEBUG oslo_vmware.api [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117066, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.793282] env[62109]: DEBUG nova.scheduler.client.report [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1036.875628] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117064, 'name': Rename_Task, 'duration_secs': 0.17405} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.875989] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1036.876262] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3118ed24-3599-4e7e-a96a-fe0fb6bb72c8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.887523] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117057, 'name': CloneVM_Task, 'duration_secs': 2.383577} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.888877] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Created linked-clone VM from snapshot [ 1036.889212] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1036.889212] env[62109]: value = "task-1117067" [ 1036.889212] env[62109]: _type = "Task" [ 1036.889212] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.889892] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1d0f4a-0b5a-4f34-a43c-d699c7cad681 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.900139] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Uploading image e6951ef3-e8f5-49f6-b66e-499fcd3e2d42 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1036.905187] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117067, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.934024] env[62109]: DEBUG oslo_vmware.rw_handles [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1036.934024] env[62109]: value = "vm-244526" [ 1036.934024] env[62109]: _type = "VirtualMachine" [ 1036.934024] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1036.934477] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3787c81e-c6ca-4e0e-91ec-f0ddd6a2b245 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.941955] env[62109]: DEBUG oslo_vmware.rw_handles [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lease: (returnval){ [ 1036.941955] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5291a07f-5aee-7a11-dde3-8dc8891c5126" [ 1036.941955] env[62109]: _type = "HttpNfcLease" [ 1036.941955] env[62109]: } obtained for exporting VM: (result){ [ 1036.941955] env[62109]: value = "vm-244526" [ 1036.941955] env[62109]: _type = "VirtualMachine" [ 1036.941955] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1036.942419] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the lease: (returnval){ [ 1036.942419] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5291a07f-5aee-7a11-dde3-8dc8891c5126" [ 1036.942419] env[62109]: _type = "HttpNfcLease" [ 1036.942419] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1036.949910] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1036.949910] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5291a07f-5aee-7a11-dde3-8dc8891c5126" [ 1036.949910] env[62109]: _type = "HttpNfcLease" [ 1036.949910] env[62109]: } is initializing. 
{{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1037.175754] env[62109]: DEBUG oslo_vmware.api [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117065, 'name': PowerOffVM_Task, 'duration_secs': 0.396791} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.176037] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1037.176221] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1037.176495] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99005ff5-fca8-4721-a660-d1a572d902db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.240427] env[62109]: DEBUG oslo_vmware.api [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117066, 'name': PowerOffVM_Task, 'duration_secs': 0.336041} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.240967] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1037.241162] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1037.241636] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8e79180-da31-4720-8561-15d1e54203eb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.298152] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.795s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.312224] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1037.314019] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1037.314019] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Deleting the datastore file [datastore1] 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.314019] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e3c3951-b45f-40f0-b23d-c8eaf0cc055d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.319851] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1037.319998] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] 
Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1037.320236] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleting the datastore file [datastore2] 12288104-483b-4bb4-9e33-05bf5d7be3a8 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.321827] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ae1c1f7-479d-4f32-b7f4-581ca120cf3f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.325134] env[62109]: DEBUG oslo_vmware.api [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 1037.325134] env[62109]: value = "task-1117071" [ 1037.325134] env[62109]: _type = "Task" [ 1037.325134] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.327162] env[62109]: INFO nova.scheduler.client.report [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Deleted allocations for instance a197a73e-32bc-45b0-ae6f-5275cf74285b [ 1037.338084] env[62109]: DEBUG oslo_vmware.api [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for the task: (returnval){ [ 1037.338084] env[62109]: value = "task-1117072" [ 1037.338084] env[62109]: _type = "Task" [ 1037.338084] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.345219] env[62109]: DEBUG oslo_vmware.api [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117071, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.349897] env[62109]: DEBUG oslo_vmware.api [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117072, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.402725] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117067, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.451894] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1037.451894] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5291a07f-5aee-7a11-dde3-8dc8891c5126" [ 1037.451894] env[62109]: _type = "HttpNfcLease" [ 1037.451894] env[62109]: } is ready. 
{{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1037.452217] env[62109]: DEBUG oslo_vmware.rw_handles [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1037.452217] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5291a07f-5aee-7a11-dde3-8dc8891c5126" [ 1037.452217] env[62109]: _type = "HttpNfcLease" [ 1037.452217] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1037.452918] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05af7b83-0d49-4337-9360-a570c0715415 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.460311] env[62109]: DEBUG oslo_vmware.rw_handles [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dcc4f2-ff7b-6420-8bb0-0ac6ead4a28a/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1037.460518] env[62109]: DEBUG oslo_vmware.rw_handles [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dcc4f2-ff7b-6420-8bb0-0ac6ead4a28a/disk-0.vmdk for reading. {{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1037.569171] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d1b7ef81-e11c-4291-9129-f3358e3a9935 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.807110] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "2fddcd6c-241e-4591-acec-12487909355c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.807587] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "2fddcd6c-241e-4591-acec-12487909355c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.807681] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "2fddcd6c-241e-4591-acec-12487909355c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.807903] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "2fddcd6c-241e-4591-acec-12487909355c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.808417] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "2fddcd6c-241e-4591-acec-12487909355c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.811229] env[62109]: INFO nova.compute.manager [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Terminating instance [ 1037.815062] env[62109]: DEBUG nova.compute.manager [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1037.815410] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1037.816413] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f16b13b-d45c-431a-bf6f-aee3ef71912a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.825217] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1037.825664] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b49eaa1-3f4f-445e-a227-e374b9407bf2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.839266] env[62109]: DEBUG oslo_vmware.api [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117071, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.36882} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.841120] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e092eb7-eb24-4f1e-b101-8917a571bae0 tempest-AttachVolumeTestJSON-2109712222 tempest-AttachVolumeTestJSON-2109712222-project-member] Lock "a197a73e-32bc-45b0-ae6f-5275cf74285b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.942s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.842438] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.842747] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1037.843026] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1037.843297] env[62109]: INFO nova.compute.manager [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1037.843632] env[62109]: DEBUG oslo.service.loopingcall [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1037.843985] env[62109]: DEBUG oslo_vmware.api [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1037.843985] env[62109]: value = "task-1117073" [ 1037.843985] env[62109]: _type = "Task" [ 1037.843985] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.847606] env[62109]: DEBUG nova.compute.manager [-] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1037.847735] env[62109]: DEBUG nova.network.neutron [-] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1037.859053] env[62109]: DEBUG oslo_vmware.api [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Task: {'id': task-1117072, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.279336} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.869982] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.870231] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1037.870506] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1037.870708] env[62109]: INFO nova.compute.manager [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1037.870965] env[62109]: DEBUG oslo.service.loopingcall [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1037.871238] env[62109]: DEBUG oslo_vmware.api [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117073, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.871774] env[62109]: DEBUG nova.compute.manager [-] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1037.871877] env[62109]: DEBUG nova.network.neutron [-] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1037.904482] env[62109]: DEBUG oslo_vmware.api [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117067, 'name': PowerOnVM_Task, 'duration_secs': 0.801375} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.904670] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1037.904910] env[62109]: INFO nova.compute.manager [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Took 6.91 seconds to spawn the instance on the hypervisor. [ 1037.905229] env[62109]: DEBUG nova.compute.manager [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1037.905956] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49d66ab-64b8-4a94-ab6a-40bdaf73e18b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.195825] env[62109]: DEBUG nova.compute.manager [req-2610d3cf-f50c-4cd8-81c0-56b454151e84 req-d248eff0-837e-4dcd-a384-42d5e7b37895 service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Received event network-vif-deleted-bef2387d-4fe5-4a29-89fe-d990d0e93b2a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1038.195908] env[62109]: INFO nova.compute.manager [req-2610d3cf-f50c-4cd8-81c0-56b454151e84 req-d248eff0-837e-4dcd-a384-42d5e7b37895 service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Neutron deleted interface bef2387d-4fe5-4a29-89fe-d990d0e93b2a; detaching it from the instance and deleting it from the info cache [ 1038.197568] env[62109]: DEBUG nova.network.neutron [req-2610d3cf-f50c-4cd8-81c0-56b454151e84 req-d248eff0-837e-4dcd-a384-42d5e7b37895 service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.363513] env[62109]: DEBUG oslo_vmware.api [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117073, 'name': PowerOffVM_Task, 'duration_secs': 0.307169} completed 
successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.364754] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1038.364917] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1038.365232] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4f5e46c-8b6e-4395-b2da-3a830e592de3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.428443] env[62109]: DEBUG nova.compute.manager [req-90484591-89dd-4516-9cc8-81e1c711e9a5 req-fa106a6b-8fc8-4168-9357-a90c16192f0c service nova] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Received event network-vif-deleted-b56d9eee-64a4-44a1-8e6c-47e14aea0e2f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1038.428443] env[62109]: INFO nova.compute.manager [req-90484591-89dd-4516-9cc8-81e1c711e9a5 req-fa106a6b-8fc8-4168-9357-a90c16192f0c service nova] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Neutron deleted interface b56d9eee-64a4-44a1-8e6c-47e14aea0e2f; detaching it from the instance and deleting it from the info cache [ 1038.428443] env[62109]: DEBUG nova.network.neutron [req-90484591-89dd-4516-9cc8-81e1c711e9a5 req-fa106a6b-8fc8-4168-9357-a90c16192f0c service nova] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.433109] env[62109]: INFO nova.compute.manager [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Took 13.51 seconds to build instance. 
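The PowerOffVM_Task and wait_for_task entries above follow the usual oslo.vmware pattern: invoke a vSphere task method through the API session, then block on the session's task poller, which is what produces the "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern, not Nova's exact code path; the vCenter host, credentials and instance UUID below are placeholders, not values from this log:

    from oslo_vmware import api

    # Placeholders only: host, credentials and UUID are illustrative.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # Locate the VM by instance UUID, mirroring the SearchIndex.FindAllByUuid
    # invocations seen in the log.
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='00000000-0000-0000-0000-000000000000',
        vmSearch=True, instanceUuid=True)

    if vm_refs:
        # Start the power-off task and block until vCenter reports success;
        # wait_for_task() polls the task object, logging progress as it goes,
        # and raises if the task ends in an error state.
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_refs[0])
        session.wait_for_task(task_ref)

Because wait_for_task() re-raises the task's fault on failure, errors in power, clone and delete operations surface as exceptions in the compute manager rather than silently completing.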
[ 1038.439424] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1038.439632] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1038.439809] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleting the datastore file [datastore1] 2fddcd6c-241e-4591-acec-12487909355c {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1038.440987] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d48b39b4-2cbe-4f86-b745-5b8c681ae507 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.447941] env[62109]: DEBUG oslo_vmware.api [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for the task: (returnval){ [ 1038.447941] env[62109]: value = "task-1117075" [ 1038.447941] env[62109]: _type = "Task" [ 1038.447941] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.462186] env[62109]: DEBUG oslo_vmware.api [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117075, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.665293] env[62109]: DEBUG nova.network.neutron [-] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.700236] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef87521a-419b-47dc-8440-d7fe7ce9bc0f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.715901] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b87b2c3-6f24-4c7e-8790-29ee6768bd73 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.752043] env[62109]: DEBUG nova.compute.manager [req-2610d3cf-f50c-4cd8-81c0-56b454151e84 req-d248eff0-837e-4dcd-a384-42d5e7b37895 service nova] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Detach interface failed, port_id=bef2387d-4fe5-4a29-89fe-d990d0e93b2a, reason: Instance b5410f60-c5fb-4325-8d42-8745c310a6ca could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1038.888787] env[62109]: DEBUG nova.network.neutron [-] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.903463] env[62109]: DEBUG nova.network.neutron [-] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.935759] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c5003f3-a892-422d-8feb-572cbb0aa7d4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.938932] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e578aa1b-9a2f-4415-b2d9-6c3e45c101ef tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.032s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.947374] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31af7d4f-efec-4b14-bcae-58c14ac44b50 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.972202] env[62109]: DEBUG oslo_vmware.api [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Task: {'id': task-1117075, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248228} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.972705] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1038.973029] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1038.974085] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1038.974085] env[62109]: INFO nova.compute.manager [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Took 1.16 seconds to destroy the instance on the hypervisor. 
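The DeleteDatastoreFile_Task entries above apply the same invoke-and-wait pattern to the FileManager when instance contents are removed from a datastore. A rough sketch under the same assumptions as the previous example; the datacenter lookup and the datastore path are illustrative placeholders, not Nova's actual helper code:

    from oslo_vmware import api
    from oslo_vmware import vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # Grab a datacenter reference; a real driver resolves the datacenter that
    # actually owns the datastore instead of taking the first result.
    retrieve_result = session.invoke_api(
        vim_util, 'get_objects', session.vim, 'Datacenter', 1)
    dc_ref = retrieve_result.objects[0].obj

    # Delete the instance directory and wait for the task; the path below is a
    # placeholder in the same "[datastore] <instance uuid>" form used above.
    task_ref = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore1] 00000000-0000-0000-0000-000000000000',
        datacenter=dc_ref)
    session.wait_for_task(task_ref)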
[ 1038.974085] env[62109]: DEBUG oslo.service.loopingcall [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1038.974524] env[62109]: DEBUG nova.compute.manager [-] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1038.974793] env[62109]: DEBUG nova.network.neutron [-] [instance: 2fddcd6c-241e-4591-acec-12487909355c] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1038.986181] env[62109]: DEBUG nova.compute.manager [req-90484591-89dd-4516-9cc8-81e1c711e9a5 req-fa106a6b-8fc8-4168-9357-a90c16192f0c service nova] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Detach interface failed, port_id=b56d9eee-64a4-44a1-8e6c-47e14aea0e2f, reason: Instance 12288104-483b-4bb4-9e33-05bf5d7be3a8 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1039.169151] env[62109]: INFO nova.compute.manager [-] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Took 2.51 seconds to deallocate network for instance. [ 1039.391909] env[62109]: INFO nova.compute.manager [-] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Took 1.52 seconds to deallocate network for instance. [ 1039.407333] env[62109]: INFO nova.compute.manager [-] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Took 1.56 seconds to deallocate network for instance. 
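The "Waiting for function ... _deallocate_network_with_retries to return" lines come from oslo.service's looping-call helper, which repeatedly invokes a function until it raises LoopingCallDone. A minimal sketch of that retry pattern; the deallocate step is a stand-in function, and Nova's real implementation wraps the Neutron cleanup with its own back-off policy rather than this fixed interval:

    from oslo_service import loopingcall

    attempts = {'count': 0}

    def deallocate_network():
        pass  # stand-in; the real call tears down ports via Neutron

    def _deallocate_with_retries():
        attempts['count'] += 1
        try:
            deallocate_network()
        except Exception:
            if attempts['count'] >= 3:
                raise              # give up; .wait() re-raises the error
            return                 # returning lets the loop call us again
        # Signal the loop to stop and hand a return value back to .wait().
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=2.0, initial_delay=0).wait()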
[ 1039.683532] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.683532] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.683532] env[62109]: DEBUG nova.objects.instance [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lazy-loading 'resources' on Instance uuid b5410f60-c5fb-4325-8d42-8745c310a6ca {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.778733] env[62109]: DEBUG nova.network.neutron [-] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.907363] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.915616] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.282925] env[62109]: DEBUG nova.compute.manager [req-7a2b436f-5f75-443e-835b-8fb34f35d3e7 req-252f23fd-7fcc-4e7a-9f99-7d9001d14ecd service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Received event network-changed-6228f2fe-46b3-4b2e-ac77-0abf0078c9ea {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1040.283413] env[62109]: DEBUG nova.compute.manager [req-7a2b436f-5f75-443e-835b-8fb34f35d3e7 req-252f23fd-7fcc-4e7a-9f99-7d9001d14ecd service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Refreshing instance network info cache due to event network-changed-6228f2fe-46b3-4b2e-ac77-0abf0078c9ea. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1040.283921] env[62109]: DEBUG oslo_concurrency.lockutils [req-7a2b436f-5f75-443e-835b-8fb34f35d3e7 req-252f23fd-7fcc-4e7a-9f99-7d9001d14ecd service nova] Acquiring lock "refresh_cache-e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.284242] env[62109]: DEBUG oslo_concurrency.lockutils [req-7a2b436f-5f75-443e-835b-8fb34f35d3e7 req-252f23fd-7fcc-4e7a-9f99-7d9001d14ecd service nova] Acquired lock "refresh_cache-e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.284586] env[62109]: DEBUG nova.network.neutron [req-7a2b436f-5f75-443e-835b-8fb34f35d3e7 req-252f23fd-7fcc-4e7a-9f99-7d9001d14ecd service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Refreshing network info cache for port 6228f2fe-46b3-4b2e-ac77-0abf0078c9ea {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1040.286813] env[62109]: INFO nova.compute.manager [-] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Took 1.31 seconds to deallocate network for instance. [ 1040.414924] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0437084d-c1ac-4c9f-8645-92361cf7ba9b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.423689] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57dad449-46cb-4990-8ae5-50a6c254d25d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.458370] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe22b75c-6066-4df6-a29a-6615c23229ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.467348] env[62109]: DEBUG nova.compute.manager [req-54d20ba6-5ebd-4121-88b1-096e8fcdb4b2 req-09b7f191-dee4-4eb0-8ee5-557cbef19357 service nova] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Received event network-vif-deleted-7448d73b-2d36-46d9-9f1f-3ed3ede34226 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1040.467548] env[62109]: DEBUG nova.compute.manager [req-54d20ba6-5ebd-4121-88b1-096e8fcdb4b2 req-09b7f191-dee4-4eb0-8ee5-557cbef19357 service nova] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Received event network-vif-deleted-879d4990-6187-4722-b8c5-0c20f9fa59cc {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1040.472392] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c857733-656e-4134-b43d-f776ed033e3d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.488078] env[62109]: DEBUG nova.compute.provider_tree [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.804875] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.992696] env[62109]: DEBUG nova.scheduler.client.report [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1041.020943] env[62109]: DEBUG nova.network.neutron [req-7a2b436f-5f75-443e-835b-8fb34f35d3e7 req-252f23fd-7fcc-4e7a-9f99-7d9001d14ecd service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Updated VIF entry in instance network info cache for port 6228f2fe-46b3-4b2e-ac77-0abf0078c9ea. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1041.021219] env[62109]: DEBUG nova.network.neutron [req-7a2b436f-5f75-443e-835b-8fb34f35d3e7 req-252f23fd-7fcc-4e7a-9f99-7d9001d14ecd service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Updating instance_info_cache with network_info: [{"id": "6228f2fe-46b3-4b2e-ac77-0abf0078c9ea", "address": "fa:16:3e:03:d5:d5", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228f2fe-46", "ovs_interfaceid": "6228f2fe-46b3-4b2e-ac77-0abf0078c9ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.081216] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquiring lock "d9a02690-0e85-4a61-a794-e9a852ce77d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.081394] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Lock "d9a02690-0e85-4a61-a794-e9a852ce77d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.503809] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.823s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.507412] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.601s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.508287] env[62109]: DEBUG nova.objects.instance [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lazy-loading 'resources' on Instance uuid 12288104-483b-4bb4-9e33-05bf5d7be3a8 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.523888] env[62109]: DEBUG oslo_concurrency.lockutils [req-7a2b436f-5f75-443e-835b-8fb34f35d3e7 req-252f23fd-7fcc-4e7a-9f99-7d9001d14ecd service nova] Releasing lock "refresh_cache-e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.526394] env[62109]: INFO nova.scheduler.client.report [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Deleted allocations for instance b5410f60-c5fb-4325-8d42-8745c310a6ca [ 1041.583571] env[62109]: DEBUG nova.compute.manager [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1042.033930] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f85d1730-ab47-4aaf-8429-972936bf7c5d tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "b5410f60-c5fb-4325-8d42-8745c310a6ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.534s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.103875] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.170091] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84586113-5b4f-4ef8-aad1-ecf57c999bbe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.178728] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce3b297-f813-42a9-9544-d6e29dc9184d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.213809] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d5af8d-c726-4cf2-b54e-9de8ea755dbb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.222035] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c24ab895-70d1-4780-850b-83209d375e07 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.237794] env[62109]: DEBUG nova.compute.provider_tree [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.740850] env[62109]: DEBUG nova.scheduler.client.report [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1042.930728] env[62109]: DEBUG oslo_concurrency.lockutils [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "6b5a009e-28f5-4be7-8641-089abe359954" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.931018] env[62109]: DEBUG oslo_concurrency.lockutils [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "6b5a009e-28f5-4be7-8641-089abe359954" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.931246] env[62109]: DEBUG oslo_concurrency.lockutils [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "6b5a009e-28f5-4be7-8641-089abe359954-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.931441] env[62109]: DEBUG oslo_concurrency.lockutils [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "6b5a009e-28f5-4be7-8641-089abe359954-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.931616] env[62109]: DEBUG oslo_concurrency.lockutils [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "6b5a009e-28f5-4be7-8641-089abe359954-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.935979] env[62109]: INFO nova.compute.manager [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Terminating instance [ 1042.938155] env[62109]: DEBUG nova.compute.manager [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1042.938385] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1042.939262] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44983a89-c1f2-4776-866c-463a77e08763 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.947437] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1042.947682] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7d53714-502d-4cfe-97c5-1a48343d1cea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.954270] env[62109]: DEBUG oslo_vmware.api [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 1042.954270] env[62109]: value = "task-1117079" [ 1042.954270] env[62109]: _type = "Task" [ 1042.954270] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.961757] env[62109]: DEBUG oslo_vmware.api [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117079, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.246171] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.739s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.248729] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.334s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.248981] env[62109]: DEBUG nova.objects.instance [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lazy-loading 'resources' on Instance uuid 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.265693] env[62109]: INFO nova.scheduler.client.report [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Deleted allocations for instance 12288104-483b-4bb4-9e33-05bf5d7be3a8 [ 1043.464856] env[62109]: DEBUG oslo_vmware.api [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117079, 'name': PowerOffVM_Task, 'duration_secs': 0.181339} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.465091] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1043.465276] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1043.465575] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-799ed337-3f02-4c72-a1c3-4e1f50270ee2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.527279] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "bc75898d-7856-4ecb-9640-ec30538fe90f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.527585] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.541030] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1043.541030] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1043.541030] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Deleting the datastore file [datastore2] 6b5a009e-28f5-4be7-8641-089abe359954 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1043.541234] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-112eb906-71b8-429b-b162-561ea901e610 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.548472] env[62109]: DEBUG oslo_vmware.api [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 
tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for the task: (returnval){ [ 1043.548472] env[62109]: value = "task-1117081" [ 1043.548472] env[62109]: _type = "Task" [ 1043.548472] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.556638] env[62109]: DEBUG oslo_vmware.api [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117081, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.773374] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c60c92d-2ccb-4801-8a5b-d3f55e25aee6 tempest-ServerDiskConfigTestJSON-842321983 tempest-ServerDiskConfigTestJSON-842321983-project-member] Lock "12288104-483b-4bb4-9e33-05bf5d7be3a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.072s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.922316] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4f225b-898f-476f-876b-90e7bf285e2d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.931981] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d542ae70-3465-4390-b7cd-d8868f678100 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.965999] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a86c34-941e-4930-96f9-3ffee91bb478 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.974555] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc62504-60a5-4143-99e0-a55222216b42 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.989031] env[62109]: DEBUG nova.compute.provider_tree [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.030914] env[62109]: DEBUG nova.compute.utils [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1044.061202] env[62109]: DEBUG oslo_vmware.api [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Task: {'id': task-1117081, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.476017} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.061726] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1044.061830] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1044.062085] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1044.062324] env[62109]: INFO nova.compute.manager [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1044.062654] env[62109]: DEBUG oslo.service.loopingcall [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.063284] env[62109]: DEBUG nova.compute.manager [-] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1044.063426] env[62109]: DEBUG nova.network.neutron [-] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1044.496366] env[62109]: DEBUG nova.scheduler.client.report [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1044.506057] env[62109]: DEBUG nova.compute.manager [req-a18e6237-5aab-4466-b9d8-09c619dcd2c6 req-a17bff76-61bd-42e7-890b-6218ee76552a service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Received event network-vif-deleted-f8fbaff0-8e7e-4f1c-9709-51d00228bc0d {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1044.506383] env[62109]: INFO nova.compute.manager [req-a18e6237-5aab-4466-b9d8-09c619dcd2c6 req-a17bff76-61bd-42e7-890b-6218ee76552a service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Neutron deleted interface f8fbaff0-8e7e-4f1c-9709-51d00228bc0d; detaching it from the instance and deleting it from the info cache [ 1044.506605] env[62109]: DEBUG nova.network.neutron [req-a18e6237-5aab-4466-b9d8-09c619dcd2c6 req-a17bff76-61bd-42e7-890b-6218ee76552a service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.534379] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.970229] env[62109]: DEBUG nova.network.neutron [-] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.012156] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.763s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.014477] env[62109]: DEBUG oslo_vmware.service 
[-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da10b2d5-52ee-4f85-9f37-dbe2ae0e9fdc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.016555] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.212s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.016762] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.018827] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.915s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.020930] env[62109]: INFO nova.compute.claims [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.036921] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3fbe9f9-156a-43ee-ad0f-ddaf8af3c3ac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.053260] env[62109]: INFO nova.scheduler.client.report [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Deleted allocations for instance 2fddcd6c-241e-4591-acec-12487909355c [ 1045.055091] env[62109]: INFO nova.scheduler.client.report [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Deleted allocations for instance 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4 [ 1045.082635] env[62109]: DEBUG nova.compute.manager [req-a18e6237-5aab-4466-b9d8-09c619dcd2c6 req-a17bff76-61bd-42e7-890b-6218ee76552a service nova] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Detach interface failed, port_id=f8fbaff0-8e7e-4f1c-9709-51d00228bc0d, reason: Instance 6b5a009e-28f5-4be7-8641-089abe359954 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1045.473786] env[62109]: INFO nova.compute.manager [-] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Took 1.41 seconds to deallocate network for instance. 
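The power-off / unregister / delete-datastore-file sequence above is driven by oslo.vmware task polling: each VirtualMachine.PowerOffVM_Task or FileManager.DeleteDatastoreFile_Task invocation returns a task reference that wait_for_task polls at a fixed interval, which is what produces the repeated "progress is 0% ... completed successfully" pairs logged from api.py:434/444. A minimal sketch of that polling pattern, built on oslo.service's FixedIntervalLoopingCall; the FakeTask class and the wait_for_task helper here are illustrative stand-ins, not oslo.vmware's actual implementation.

    from oslo_service import loopingcall

    class FakeTask:
        """Illustrative stand-in for a vSphere task reference (not oslo.vmware)."""
        def __init__(self, ticks=3):
            self._ticks = ticks

        def poll(self):
            # Pretend the task finishes after a few polls.
            self._ticks -= 1
            return "success" if self._ticks <= 0 else "running"

    def wait_for_task(task, interval=0.5):
        """Poll `task` at a fixed interval until it reports success."""
        def _poll():
            state = task.poll()
            print("task progress: %s" % state)
            if state == "success":
                # LoopingCallDone stops the loop; its value is returned by wait().
                raise loopingcall.LoopingCallDone(state)

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()

    print(wait_for_task(FakeTask()))

The interleaved "Acquiring lock ... / acquired ... waited N.NNNs / released ... held N.NNNs" triplets follow the same decorator-style pattern in oslo.concurrency's lockutils (the inner wrapper cited at lockutils.py:402/407/421), which logs wait and hold times around the critical section it guards.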
[ 1045.569543] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dfc00f38-7024-4aaa-8ccc-250e4cb4c09a tempest-DeleteServersTestJSON-65278859 tempest-DeleteServersTestJSON-65278859-project-member] Lock "2fddcd6c-241e-4591-acec-12487909355c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.762s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.571058] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0be0db54-27d0-4270-9695-fdc74f0340e0 tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "58365fb6-a38e-4afa-be36-3cdcdbdbc2b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.935s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.620084] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "bc75898d-7856-4ecb-9640-ec30538fe90f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.620434] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.620689] env[62109]: INFO nova.compute.manager [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Attaching volume 321c4adf-adc3-49cc-a7cc-31ff829b0f66 to /dev/sdb [ 1045.668853] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6103810-7f78-4a95-99d2-4ef9740ac750 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.676945] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c013a7-b7d9-4420-adbd-fcc422cfb771 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.693650] env[62109]: DEBUG nova.virt.block_device [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating existing volume attachment record: d840839f-d8e2-4155-ae0a-4bef1880c5f3 {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1045.982056] env[62109]: DEBUG oslo_concurrency.lockutils [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.178217] 
env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13655369-6a81-4a4f-ae34-1fddbfc3e982 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.186449] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e663a7-27ef-488b-972e-866d3a193f72 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.220113] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a839291-4eae-4f06-a8d1-38af561ac290 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.229098] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e7752a-de1f-4ee7-8816-ed85f998bf31 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.245577] env[62109]: DEBUG nova.compute.provider_tree [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1046.319139] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.319506] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.319768] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "3da7aca9-5d65-4f5e-b0a3-7cf5308f0384-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.320024] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "3da7aca9-5d65-4f5e-b0a3-7cf5308f0384-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.320305] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 
tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "3da7aca9-5d65-4f5e-b0a3-7cf5308f0384-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.323237] env[62109]: INFO nova.compute.manager [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Terminating instance [ 1046.325428] env[62109]: DEBUG nova.compute.manager [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1046.325653] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1046.326723] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dabb30d-57ce-4562-b3f7-bcf28dd05326 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.336294] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1046.336560] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aeff7017-77c0-4999-b024-4d3fa9568e4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.348032] env[62109]: DEBUG oslo_vmware.api [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 1046.348032] env[62109]: value = "task-1117088" [ 1046.348032] env[62109]: _type = "Task" [ 1046.348032] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.354098] env[62109]: DEBUG oslo_vmware.api [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117088, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.750015] env[62109]: DEBUG nova.scheduler.client.report [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1046.856260] env[62109]: DEBUG oslo_vmware.api [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117088, 'name': PowerOffVM_Task, 'duration_secs': 0.288503} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.856546] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1046.856716] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1046.856976] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be2aa4c9-8a5d-444d-9dee-eedc36cad99b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.940651] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1046.943084] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1046.943084] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Deleting the datastore file [datastore2] 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1046.943084] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-ef48c5d5-5022-42c4-bf9c-7960e0989552 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.955045] env[62109]: DEBUG oslo_vmware.api [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for the task: (returnval){ [ 1046.955045] env[62109]: value = "task-1117090" [ 1046.955045] env[62109]: _type = "Task" [ 1046.955045] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.963653] env[62109]: DEBUG oslo_vmware.api [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117090, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.259161] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.260436] env[62109]: DEBUG nova.compute.manager [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1047.265098] env[62109]: DEBUG oslo_concurrency.lockutils [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.284s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.266311] env[62109]: DEBUG nova.objects.instance [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lazy-loading 'resources' on Instance uuid 6b5a009e-28f5-4be7-8641-089abe359954 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.469147] env[62109]: DEBUG oslo_vmware.api [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Task: {'id': task-1117090, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.440953} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.469147] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1047.469147] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1047.469147] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1047.469147] env[62109]: INFO nova.compute.manager [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1047.469147] env[62109]: DEBUG oslo.service.loopingcall [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1047.469147] env[62109]: DEBUG nova.compute.manager [-] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1047.469420] env[62109]: DEBUG nova.network.neutron [-] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1047.496498] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquiring lock "42e26a9c-fdba-4d5c-86b1-146fce2e6b23" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.497136] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Lock "42e26a9c-fdba-4d5c-86b1-146fce2e6b23" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.769254] env[62109]: DEBUG nova.compute.utils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1047.777034] env[62109]: DEBUG nova.compute.manager [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1047.779912] env[62109]: DEBUG nova.network.neutron [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1047.857727] env[62109]: DEBUG nova.policy [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '162ee47083e0419cb5a7360898959b7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '602e84ee01de44dabeb7c20fdbcb5795', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1047.940434] env[62109]: DEBUG nova.compute.manager [req-70e651b6-cc01-42b3-912f-a0b0a86e5c13 req-9b384c82-d56d-489d-ad38-712f95f61bf9 service nova] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Received event network-vif-deleted-4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1047.940696] env[62109]: INFO nova.compute.manager [req-70e651b6-cc01-42b3-912f-a0b0a86e5c13 req-9b384c82-d56d-489d-ad38-712f95f61bf9 service nova] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Neutron deleted interface 4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6; detaching it from the instance and deleting it from the info cache [ 1047.940874] env[62109]: DEBUG nova.network.neutron [req-70e651b6-cc01-42b3-912f-a0b0a86e5c13 req-9b384c82-d56d-489d-ad38-712f95f61bf9 service nova] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.953769] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5c3dc3-55a1-445d-89ba-72890148a701 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.963375] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1da394-8750-4dc5-868b-090f144e6242 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.001417] env[62109]: DEBUG nova.compute.manager [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1048.005596] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3489cff7-614b-411e-8e4a-79fc3a9ee5f4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.015430] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f7424a-07db-4d37-918b-49f0bb805643 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.030917] env[62109]: DEBUG nova.compute.provider_tree [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.195021] env[62109]: DEBUG oslo_vmware.rw_handles [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dcc4f2-ff7b-6420-8bb0-0ac6ead4a28a/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1048.196087] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f053414-de6e-498d-9538-46de787b188e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.205159] env[62109]: DEBUG oslo_vmware.rw_handles [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dcc4f2-ff7b-6420-8bb0-0ac6ead4a28a/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1048.206302] env[62109]: ERROR oslo_vmware.rw_handles [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dcc4f2-ff7b-6420-8bb0-0ac6ead4a28a/disk-0.vmdk due to incomplete transfer. [ 1048.206302] env[62109]: DEBUG nova.network.neutron [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Successfully created port: 953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1048.208490] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9b48a639-2eec-4840-8570-a51aade0ec31 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.217075] env[62109]: DEBUG oslo_vmware.rw_handles [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dcc4f2-ff7b-6420-8bb0-0ac6ead4a28a/disk-0.vmdk. 
{{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1048.217307] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Uploaded image e6951ef3-e8f5-49f6-b66e-499fcd3e2d42 to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1048.220422] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1048.220422] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c71c4941-3be6-47b9-a6d4-0d1618e72d32 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.228842] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1048.228842] env[62109]: value = "task-1117092" [ 1048.228842] env[62109]: _type = "Task" [ 1048.228842] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.237602] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117092, 'name': Destroy_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.278808] env[62109]: DEBUG nova.compute.manager [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1048.397897] env[62109]: DEBUG nova.network.neutron [-] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.444394] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d08ce031-5e4e-46e2-8fe7-9df94e2a4da4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.455110] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6739be-5271-42a3-8e29-478a3ea3ca94 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.484838] env[62109]: DEBUG nova.compute.manager [req-70e651b6-cc01-42b3-912f-a0b0a86e5c13 req-9b384c82-d56d-489d-ad38-712f95f61bf9 service nova] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Detach interface failed, port_id=4c8bdd11-0bd8-4fee-8ad1-38cf257db9e6, reason: Instance 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384 could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1048.530982] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.536296] env[62109]: DEBUG nova.scheduler.client.report [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1048.740850] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117092, 'name': Destroy_Task} progress is 33%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.904464] env[62109]: INFO nova.compute.manager [-] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Took 1.43 seconds to deallocate network for instance. [ 1049.042692] env[62109]: DEBUG oslo_concurrency.lockutils [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.777s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.045668] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.515s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.047644] env[62109]: INFO nova.compute.claims [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1049.069942] env[62109]: INFO nova.scheduler.client.report [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Deleted allocations for instance 6b5a009e-28f5-4be7-8641-089abe359954 [ 1049.242019] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117092, 'name': Destroy_Task, 'duration_secs': 0.652699} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.245781] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Destroyed the VM [ 1049.245853] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1049.246109] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c3950df4-8413-4387-9fb0-ce2dfc645c02 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.254489] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1049.254489] env[62109]: value = "task-1117093" [ 1049.254489] env[62109]: _type = "Task" [ 1049.254489] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.264460] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117093, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.288220] env[62109]: DEBUG nova.compute.manager [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1049.321596] env[62109]: DEBUG nova.virt.hardware [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1049.321880] env[62109]: DEBUG nova.virt.hardware [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1049.322145] env[62109]: DEBUG nova.virt.hardware [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1049.322209] env[62109]: DEBUG nova.virt.hardware [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1049.322343] env[62109]: DEBUG nova.virt.hardware [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1049.322500] env[62109]: DEBUG nova.virt.hardware [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1049.322741] env[62109]: DEBUG nova.virt.hardware [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1049.322916] env[62109]: DEBUG nova.virt.hardware [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1049.323104] env[62109]: DEBUG nova.virt.hardware [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1049.323276] env[62109]: DEBUG nova.virt.hardware [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1049.323563] env[62109]: DEBUG nova.virt.hardware [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1049.324331] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b86810e-89af-4d92-8d62-aaac7fa7cfad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.334053] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e95c247-91ef-4cee-8c0c-9067ff945eb5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.410539] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.582765] env[62109]: DEBUG oslo_concurrency.lockutils [None req-906c1583-e68d-415d-b0fb-174e896fb318 tempest-AttachInterfacesTestJSON-1388106985 tempest-AttachInterfacesTestJSON-1388106985-project-member] Lock "6b5a009e-28f5-4be7-8641-089abe359954" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.652s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.765282] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117093, 'name': RemoveSnapshot_Task, 'duration_secs': 0.38822} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.765598] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1049.765882] env[62109]: DEBUG nova.compute.manager [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1049.766732] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9edaa8-51f0-4a0e-a24f-a3b3d1db08d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.917575] env[62109]: DEBUG nova.network.neutron [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Successfully updated port: 953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1049.963508] env[62109]: DEBUG nova.compute.manager [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Received event network-vif-plugged-953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1049.964014] env[62109]: DEBUG oslo_concurrency.lockutils [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] Acquiring lock "d9a02690-0e85-4a61-a794-e9a852ce77d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.964339] env[62109]: DEBUG oslo_concurrency.lockutils [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] Lock "d9a02690-0e85-4a61-a794-e9a852ce77d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.964585] env[62109]: DEBUG oslo_concurrency.lockutils [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] Lock "d9a02690-0e85-4a61-a794-e9a852ce77d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.964876] env[62109]: DEBUG nova.compute.manager [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] No waiting events found dispatching network-vif-plugged-953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1049.965113] env[62109]: WARNING nova.compute.manager [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 
req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Received unexpected event network-vif-plugged-953462ff-1e7c-459f-aef0-b8cd1de48900 for instance with vm_state building and task_state spawning. [ 1049.965354] env[62109]: DEBUG nova.compute.manager [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Received event network-changed-953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1049.965563] env[62109]: DEBUG nova.compute.manager [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Refreshing instance network info cache due to event network-changed-953462ff-1e7c-459f-aef0-b8cd1de48900. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1049.966065] env[62109]: DEBUG oslo_concurrency.lockutils [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] Acquiring lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.966863] env[62109]: DEBUG oslo_concurrency.lockutils [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] Acquired lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.967122] env[62109]: DEBUG nova.network.neutron [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Refreshing network info cache for port 953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1050.237429] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cca639e-7ce0-42e0-b4e0-7d3074a927cd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.247843] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0c2543-26de-4dc1-9d49-9dfa37b0f56d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.278788] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3a9bd6-8bda-492e-ba92-f778186e63da {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.287573] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e211690-fc3e-49ec-914d-a56ed6a102bf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.291938] env[62109]: INFO nova.compute.manager [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Shelve offloading [ 1050.294080] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 
tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1050.294334] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c57259b-3fff-400f-8252-3f709cc8a6cc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.304286] env[62109]: DEBUG nova.compute.provider_tree [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.307067] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1050.307067] env[62109]: value = "task-1117096" [ 1050.307067] env[62109]: _type = "Task" [ 1050.307067] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.317270] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1050.317547] env[62109]: DEBUG nova.compute.manager [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1050.318208] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98943baa-5039-4cec-ab32-4a7bbb216d9a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.325406] env[62109]: DEBUG oslo_concurrency.lockutils [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.325406] env[62109]: DEBUG oslo_concurrency.lockutils [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.325406] env[62109]: DEBUG nova.network.neutron [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1050.422765] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 
tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquiring lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.506561] env[62109]: DEBUG nova.network.neutron [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1050.610545] env[62109]: DEBUG nova.network.neutron [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.809937] env[62109]: DEBUG nova.scheduler.client.report [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1051.097072] env[62109]: DEBUG nova.network.neutron [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Updating instance_info_cache with network_info: [{"id": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "address": "fa:16:3e:68:0e:6e", "network": {"id": "feb45222-861d-4499-8c29-03176662f1ef", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-854878036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "093c284d31de414cb583d501864456c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbea956d1-ce", "ovs_interfaceid": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.114547] env[62109]: DEBUG oslo_concurrency.lockutils [req-43268139-2b3c-4943-b7b1-4bf0c93ba9d4 req-e26754e0-90cb-47d9-9131-8ac48005aaaf service nova] Releasing lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.115206] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquired lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.115372] env[62109]: DEBUG nova.network.neutron [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1051.319071] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.319725] env[62109]: DEBUG nova.compute.manager [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1051.325718] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.915s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.325718] env[62109]: DEBUG nova.objects.instance [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lazy-loading 'resources' on Instance uuid 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.604633] env[62109]: DEBUG oslo_concurrency.lockutils [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Releasing lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.671296] env[62109]: DEBUG nova.network.neutron [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1051.831133] env[62109]: DEBUG nova.compute.utils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1051.835322] env[62109]: DEBUG nova.compute.manager [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Not allocating networking since 'none' was specified. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1051.952616] env[62109]: DEBUG nova.network.neutron [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Updating instance_info_cache with network_info: [{"id": "953462ff-1e7c-459f-aef0-b8cd1de48900", "address": "fa:16:3e:f5:c9:21", "network": {"id": "b4705a23-aa69-4f1d-ab3d-220ae9042d81", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1020451027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "602e84ee01de44dabeb7c20fdbcb5795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap953462ff-1e", "ovs_interfaceid": "953462ff-1e7c-459f-aef0-b8cd1de48900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.018583] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daaddbda-272c-4a71-a8ee-fe06b385dd9e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.027312] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f5d4bc-3718-495e-8d2c-a3de5d3c7305 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.059620] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8721f090-1a54-4c78-9ae2-9c715941071e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.067929] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25afaf4-4f09-48ff-b2f5-d114ede448bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.083201] env[62109]: DEBUG nova.compute.provider_tree [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f 
tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.136552] env[62109]: DEBUG nova.compute.manager [req-584b8bde-b1bb-4a27-b8f2-2c87a11e9436 req-a3efbc36-a9e0-4a04-8cb8-0e844ca9f6b1 service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Received event network-vif-unplugged-bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1052.137343] env[62109]: DEBUG oslo_concurrency.lockutils [req-584b8bde-b1bb-4a27-b8f2-2c87a11e9436 req-a3efbc36-a9e0-4a04-8cb8-0e844ca9f6b1 service nova] Acquiring lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.137343] env[62109]: DEBUG oslo_concurrency.lockutils [req-584b8bde-b1bb-4a27-b8f2-2c87a11e9436 req-a3efbc36-a9e0-4a04-8cb8-0e844ca9f6b1 service nova] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.137343] env[62109]: DEBUG oslo_concurrency.lockutils [req-584b8bde-b1bb-4a27-b8f2-2c87a11e9436 req-a3efbc36-a9e0-4a04-8cb8-0e844ca9f6b1 service nova] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.137343] env[62109]: DEBUG nova.compute.manager [req-584b8bde-b1bb-4a27-b8f2-2c87a11e9436 req-a3efbc36-a9e0-4a04-8cb8-0e844ca9f6b1 service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] No waiting events found dispatching network-vif-unplugged-bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1052.137769] env[62109]: WARNING nova.compute.manager [req-584b8bde-b1bb-4a27-b8f2-2c87a11e9436 req-a3efbc36-a9e0-4a04-8cb8-0e844ca9f6b1 service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Received unexpected event network-vif-unplugged-bea956d1-ceb2-4342-bef6-6a37fef7ec4f for instance with vm_state shelved and task_state shelving_offloading. 
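
The "Acquiring lock ...", "Lock ... acquired ... :: waited" and "Lock ... released ... :: held" lines in the surrounding entries come from oslo.concurrency's named-lock wrapper (the "inner" frames in lockutils.py), which Nova uses to serialize work on shared state such as "compute_resources", per-instance "refresh_cache-<uuid>" caches and "<uuid>-events" queues. The following is a minimal sketch of that pattern only, not Nova's own code; update_usage, resource_tracker.track, refresh_network_cache and fetch_nw_info are hypothetical names used purely for illustration.

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def update_usage(resource_tracker, instance):
    # Runs with the in-process "compute_resources" lock held; the
    # decorator's wrapper emits the "Acquiring lock ...", "acquired ...
    # :: waited N.NNNs" and "released ... :: held N.NNNs" DEBUG lines
    # of the kind seen throughout this log.
    resource_tracker.track(instance)  # hypothetical helper, illustration only

def refresh_network_cache(instance_uuid, fetch_nw_info):
    # Dynamically named per-instance lock, mirroring the
    # "refresh_cache-<uuid>" lock names in the entries above.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        # Stand-in for the Neutron round trip performed while the lock
        # is held (hypothetical helper).
        return fetch_nw_info(instance_uuid)
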
[ 1052.158877] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Acquiring lock "1f346681-b63f-4587-808c-bde4f0ba5831" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.158877] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Lock "1f346681-b63f-4587-808c-bde4f0ba5831" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.165511] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1052.167303] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bde6f73-eaac-4b80-8c1b-1f97840454a1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.175768] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1052.176033] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7cc1a8c-6cf9-4f3e-816f-0a0f6f67f446 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.260582] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1052.260829] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1052.261027] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Deleting the datastore file [datastore1] 0f7445fa-c48e-4e79-a01a-1f8f70072de4 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1052.261301] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31173906-00bc-4e20-8601-d989eb241b3c {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.269627] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1052.269627] env[62109]: value = "task-1117099" [ 1052.269627] env[62109]: _type = "Task" [ 1052.269627] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.278973] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117099, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.334458] env[62109]: DEBUG nova.compute.manager [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1052.456670] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Releasing lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.457465] env[62109]: DEBUG nova.compute.manager [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Instance network_info: |[{"id": "953462ff-1e7c-459f-aef0-b8cd1de48900", "address": "fa:16:3e:f5:c9:21", "network": {"id": "b4705a23-aa69-4f1d-ab3d-220ae9042d81", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1020451027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "602e84ee01de44dabeb7c20fdbcb5795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap953462ff-1e", "ovs_interfaceid": "953462ff-1e7c-459f-aef0-b8cd1de48900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1052.458128] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:f5:c9:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f256cfee-512d-4192-9aca-6750fdb1cd4c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '953462ff-1e7c-459f-aef0-b8cd1de48900', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1052.465812] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Creating folder: Project (602e84ee01de44dabeb7c20fdbcb5795). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1052.467106] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ee795d7-05ab-476f-a731-a39c0c676c18 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.479077] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Created folder: Project (602e84ee01de44dabeb7c20fdbcb5795) in parent group-v244329. [ 1052.479231] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Creating folder: Instances. Parent ref: group-v244531. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1052.479512] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f82a1d17-733d-41ae-b239-0d4cd68d7745 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.491012] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Created folder: Instances in parent group-v244531. [ 1052.492422] env[62109]: DEBUG oslo.service.loopingcall [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1052.492422] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1052.493073] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3128352-9636-40ea-97ae-d3d11e0ea672 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.519911] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1052.519911] env[62109]: value = "task-1117102" [ 1052.519911] env[62109]: _type = "Task" [ 1052.519911] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.527360] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117102, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.591072] env[62109]: DEBUG nova.scheduler.client.report [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1052.663195] env[62109]: DEBUG nova.compute.manager [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1052.759559] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Volume attach. Driver type: vmdk {{(pid=62109) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1052.762107] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244529', 'volume_id': '321c4adf-adc3-49cc-a7cc-31ff829b0f66', 'name': 'volume-321c4adf-adc3-49cc-a7cc-31ff829b0f66', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc75898d-7856-4ecb-9640-ec30538fe90f', 'attached_at': '', 'detached_at': '', 'volume_id': '321c4adf-adc3-49cc-a7cc-31ff829b0f66', 'serial': '321c4adf-adc3-49cc-a7cc-31ff829b0f66'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1052.762107] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68b1079-87ab-4fee-8f70-5e5bb6aec1e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.785691] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6aae09-bbf6-4e28-9f78-7d69380ca8a1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.814576] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] volume-321c4adf-adc3-49cc-a7cc-31ff829b0f66/volume-321c4adf-adc3-49cc-a7cc-31ff829b0f66.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1052.817952] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77946465-a57b-4df7-ab9c-12fa11e35ec4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.832015] env[62109]: DEBUG oslo_vmware.api [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157326} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.832921] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1052.832921] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1052.832921] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1052.845539] env[62109]: DEBUG oslo_vmware.api [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1052.845539] env[62109]: value = "task-1117103" [ 1052.845539] env[62109]: _type = "Task" [ 1052.845539] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.856377] env[62109]: DEBUG oslo_vmware.api [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117103, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.872778] env[62109]: INFO nova.scheduler.client.report [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Deleted allocations for instance 0f7445fa-c48e-4e79-a01a-1f8f70072de4 [ 1053.029236] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117102, 'name': CreateVM_Task, 'duration_secs': 0.406033} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.029439] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1053.030340] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.030536] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.031110] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1053.031164] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ac66a3b-f4c0-418e-89a6-4ae37578b3c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.037459] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1053.037459] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f451fd-c55a-1438-922c-d7d3d05e574e" [ 1053.037459] env[62109]: _type = "Task" [ 1053.037459] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.048691] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f451fd-c55a-1438-922c-d7d3d05e574e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.093931] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.769s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.114088] env[62109]: INFO nova.scheduler.client.report [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Deleted allocations for instance 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384 [ 1053.185677] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.186144] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.188881] env[62109]: INFO nova.compute.claims [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1053.348076] env[62109]: DEBUG nova.compute.manager [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1053.360574] env[62109]: DEBUG oslo_vmware.api [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117103, 'name': ReconfigVM_Task, 'duration_secs': 0.398886} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.360878] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfigured VM instance instance-0000005c to attach disk [datastore1] volume-321c4adf-adc3-49cc-a7cc-31ff829b0f66/volume-321c4adf-adc3-49cc-a7cc-31ff829b0f66.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1053.365624] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdb4655c-79db-4520-b45b-5e3d251ba1ac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.378231] env[62109]: DEBUG oslo_concurrency.lockutils [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.386811] env[62109]: DEBUG nova.virt.hardware [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1053.387071] env[62109]: DEBUG nova.virt.hardware [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1053.387238] env[62109]: DEBUG nova.virt.hardware [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1053.387587] env[62109]: DEBUG nova.virt.hardware [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1053.387808] env[62109]: DEBUG nova.virt.hardware [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1053.387974] env[62109]: DEBUG nova.virt.hardware [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1053.388210] env[62109]: DEBUG nova.virt.hardware [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1053.388371] env[62109]: DEBUG nova.virt.hardware [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1053.388543] env[62109]: DEBUG nova.virt.hardware [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1053.388714] env[62109]: DEBUG nova.virt.hardware [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1053.388887] env[62109]: DEBUG nova.virt.hardware [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1053.389802] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afabca7d-6440-46c3-bc8f-edb06bc44dd1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.393858] env[62109]: DEBUG oslo_vmware.api [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1053.393858] env[62109]: value = "task-1117104" [ 1053.393858] env[62109]: _type = "Task" [ 1053.393858] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.401140] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b107ff-7e2a-4a1a-a2c1-4fa00faaa1b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.408119] env[62109]: DEBUG oslo_vmware.api [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117104, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.418361] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1053.424010] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Creating folder: Project (360a9056a58f4f3aa46860690fe55934). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1053.424356] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6500d15b-19b6-4424-8f0e-8d3c90442db8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.438059] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Created folder: Project (360a9056a58f4f3aa46860690fe55934) in parent group-v244329. [ 1053.438296] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Creating folder: Instances. Parent ref: group-v244534. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1053.438578] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfd5a0d4-8efb-4912-ba1d-a8e9a1cb6fc9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.448720] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Created folder: Instances in parent group-v244534. [ 1053.448969] env[62109]: DEBUG oslo.service.loopingcall [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1053.449293] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1053.449682] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1ba3b0f-8aff-4b10-9717-1a37468ae4d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.468382] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1053.468382] env[62109]: value = "task-1117107" [ 1053.468382] env[62109]: _type = "Task" [ 1053.468382] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.478109] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117107, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.548322] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f451fd-c55a-1438-922c-d7d3d05e574e, 'name': SearchDatastore_Task, 'duration_secs': 0.022641} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.548656] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.548881] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1053.549271] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.549420] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.549545] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1053.549852] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-597fac0b-8e08-4bfe-becf-68d20a84d926 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.561867] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1053.561867] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Folder 
[datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1053.561867] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa77eb43-1dd9-4463-a874-1526e577ab38 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.567803] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1053.567803] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a86bb8-c1b3-2f10-d04b-b59bc552ec09" [ 1053.567803] env[62109]: _type = "Task" [ 1053.567803] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.582660] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a86bb8-c1b3-2f10-d04b-b59bc552ec09, 'name': SearchDatastore_Task, 'duration_secs': 0.010812} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.583449] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9a4a4c8-346a-41f2-914a-b9bc321feebd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.589805] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1053.589805] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521dd96c-be54-7e50-9090-47d63d7cfafd" [ 1053.589805] env[62109]: _type = "Task" [ 1053.589805] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.598336] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521dd96c-be54-7e50-9090-47d63d7cfafd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.623849] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2eb243d1-e574-4835-8f04-a54e7bdb365f tempest-ServerRescueNegativeTestJSON-1052402546 tempest-ServerRescueNegativeTestJSON-1052402546-project-member] Lock "3da7aca9-5d65-4f5e-b0a3-7cf5308f0384" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.304s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.907515] env[62109]: DEBUG oslo_vmware.api [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117104, 'name': ReconfigVM_Task, 'duration_secs': 0.155317} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.907750] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244529', 'volume_id': '321c4adf-adc3-49cc-a7cc-31ff829b0f66', 'name': 'volume-321c4adf-adc3-49cc-a7cc-31ff829b0f66', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc75898d-7856-4ecb-9640-ec30538fe90f', 'attached_at': '', 'detached_at': '', 'volume_id': '321c4adf-adc3-49cc-a7cc-31ff829b0f66', 'serial': '321c4adf-adc3-49cc-a7cc-31ff829b0f66'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1053.979224] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117107, 'name': CreateVM_Task, 'duration_secs': 0.314606} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.979545] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1053.979820] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.980027] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.980695] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1053.981072] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e92cd0e-18b1-4be7-b748-d1308b9e3b7e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.986559] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1053.986559] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52634a6d-6e95-facf-ea05-bff3003118ff" [ 1053.986559] env[62109]: _type = "Task" [ 1053.986559] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.995326] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52634a6d-6e95-facf-ea05-bff3003118ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.104026] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521dd96c-be54-7e50-9090-47d63d7cfafd, 'name': SearchDatastore_Task, 'duration_secs': 0.010258} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.104026] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1054.104026] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] d9a02690-0e85-4a61-a794-e9a852ce77d7/d9a02690-0e85-4a61-a794-e9a852ce77d7.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1054.104026] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec1c5ca7-34ef-4eb3-9e74-11c58549ee28 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.112587] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1054.112587] env[62109]: value = "task-1117108" [ 1054.112587] env[62109]: _type = "Task" [ 1054.112587] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.121825] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117108, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.166216] env[62109]: DEBUG nova.compute.manager [req-23689174-371d-4cf2-9946-d1672a1a1669 req-f0867e32-f15f-49bd-8d03-dd6a8a55788a service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Received event network-changed-bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1054.166293] env[62109]: DEBUG nova.compute.manager [req-23689174-371d-4cf2-9946-d1672a1a1669 req-f0867e32-f15f-49bd-8d03-dd6a8a55788a service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Refreshing instance network info cache due to event network-changed-bea956d1-ceb2-4342-bef6-6a37fef7ec4f. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1054.166877] env[62109]: DEBUG oslo_concurrency.lockutils [req-23689174-371d-4cf2-9946-d1672a1a1669 req-f0867e32-f15f-49bd-8d03-dd6a8a55788a service nova] Acquiring lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.166877] env[62109]: DEBUG oslo_concurrency.lockutils [req-23689174-371d-4cf2-9946-d1672a1a1669 req-f0867e32-f15f-49bd-8d03-dd6a8a55788a service nova] Acquired lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.166877] env[62109]: DEBUG nova.network.neutron [req-23689174-371d-4cf2-9946-d1672a1a1669 req-f0867e32-f15f-49bd-8d03-dd6a8a55788a service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Refreshing network info cache for port bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1054.364029] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcf8834-bb84-4b85-8dc5-92c9debe5c4b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.374620] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd8690f-db47-4e63-b900-fd045bdb1900 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.421174] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a914ad3-6b23-4d9b-86e5-47bf19f3964d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.430218] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94494a5a-64d5-424d-8efc-06ac176af35f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.448230] env[62109]: DEBUG nova.compute.provider_tree [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.503639] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: 
{'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52634a6d-6e95-facf-ea05-bff3003118ff, 'name': SearchDatastore_Task, 'duration_secs': 0.025985} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.504167] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1054.504501] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1054.504826] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.505357] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.505678] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1054.506252] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b91997c4-1247-445f-9896-ef5d9e1c74b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.519774] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1054.520100] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1054.522648] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13996e4b-e016-45e2-89bd-f7922389a6db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.532918] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1054.532918] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f71711-2921-6759-79b1-2bf976da8ff0" [ 1054.532918] env[62109]: _type = "Task" [ 1054.532918] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.543648] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f71711-2921-6759-79b1-2bf976da8ff0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.624793] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117108, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502253} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.625495] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] d9a02690-0e85-4a61-a794-e9a852ce77d7/d9a02690-0e85-4a61-a794-e9a852ce77d7.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1054.625962] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1054.626186] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02520884-c224-435c-a28f-72f2973d9657 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.634511] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1054.634511] env[62109]: value = "task-1117110" [ 1054.634511] env[62109]: _type = "Task" [ 1054.634511] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.645265] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117110, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.952763] env[62109]: DEBUG nova.scheduler.client.report [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1054.966421] env[62109]: DEBUG nova.objects.instance [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'flavor' on Instance uuid bc75898d-7856-4ecb-9640-ec30538fe90f {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.045135] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f71711-2921-6759-79b1-2bf976da8ff0, 'name': SearchDatastore_Task, 'duration_secs': 0.017791} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.046014] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd2310cc-c3b4-41fc-bd15-6ddebe5d34ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.052534] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1055.052534] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5223ebeb-e828-1668-abae-844712df5a73" [ 1055.052534] env[62109]: _type = "Task" [ 1055.052534] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.064062] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5223ebeb-e828-1668-abae-844712df5a73, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.150035] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117110, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068163} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.150688] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1055.151285] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4492b7b8-50e5-4105-aa5c-c05ec4f5a945 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.182063] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] d9a02690-0e85-4a61-a794-e9a852ce77d7/d9a02690-0e85-4a61-a794-e9a852ce77d7.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1055.182063] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e280b532-e20a-495a-bd80-44bd41f6d60d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.200775] env[62109]: DEBUG nova.network.neutron [req-23689174-371d-4cf2-9946-d1672a1a1669 req-f0867e32-f15f-49bd-8d03-dd6a8a55788a service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Updated VIF entry in instance network info cache for port bea956d1-ceb2-4342-bef6-6a37fef7ec4f. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1055.201469] env[62109]: DEBUG nova.network.neutron [req-23689174-371d-4cf2-9946-d1672a1a1669 req-f0867e32-f15f-49bd-8d03-dd6a8a55788a service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Updating instance_info_cache with network_info: [{"id": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "address": "fa:16:3e:68:0e:6e", "network": {"id": "feb45222-861d-4499-8c29-03176662f1ef", "bridge": null, "label": "tempest-ServersNegativeTestJSON-854878036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "093c284d31de414cb583d501864456c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapbea956d1-ce", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.209600] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1055.209600] env[62109]: value = "task-1117111" [ 1055.209600] env[62109]: _type = "Task" [ 1055.209600] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.222277] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117111, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.341684] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "ad3d92ec-29a9-4f01-8117-47352c244e1e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.342360] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "ad3d92ec-29a9-4f01-8117-47352c244e1e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.459118] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.459702] env[62109]: DEBUG nova.compute.manager [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1055.462848] env[62109]: DEBUG oslo_concurrency.lockutils [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.085s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.463091] env[62109]: DEBUG nova.objects.instance [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lazy-loading 'resources' on Instance uuid 0f7445fa-c48e-4e79-a01a-1f8f70072de4 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.470787] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9d322be6-8f50-4385-a0d4-5c7d43af56ef tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.850s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.566402] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5223ebeb-e828-1668-abae-844712df5a73, 'name': SearchDatastore_Task, 'duration_secs': 0.009907} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.571033] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.571033] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 42e26a9c-fdba-4d5c-86b1-146fce2e6b23/42e26a9c-fdba-4d5c-86b1-146fce2e6b23.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1055.572327] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f9ca569-e588-4538-a0e2-835dc81dcb79 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.581620] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1055.581620] env[62109]: value = "task-1117112" [ 1055.581620] env[62109]: _type = "Task" [ 1055.581620] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.591462] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117112, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.706196] env[62109]: DEBUG oslo_concurrency.lockutils [req-23689174-371d-4cf2-9946-d1672a1a1669 req-f0867e32-f15f-49bd-8d03-dd6a8a55788a service nova] Releasing lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.720628] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.845225] env[62109]: DEBUG nova.compute.manager [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1055.966549] env[62109]: DEBUG nova.compute.utils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1055.968681] env[62109]: DEBUG nova.objects.instance [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lazy-loading 'numa_topology' on Instance uuid 0f7445fa-c48e-4e79-a01a-1f8f70072de4 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.970096] env[62109]: DEBUG nova.compute.manager [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1055.970686] env[62109]: DEBUG nova.network.neutron [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1056.053021] env[62109]: DEBUG nova.policy [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eeaf07640b634a4fa87de22f1aa48e62', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bffb55e6de89486f8d6ef9160d2e8bfb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1056.082536] env[62109]: DEBUG nova.compute.manager [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Stashing vm_state: active {{(pid=62109) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1056.100799] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117112, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.221615] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117111, 'name': ReconfigVM_Task, 'duration_secs': 0.96668} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.222900] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Reconfigured VM instance instance-00000062 to attach disk [datastore2] d9a02690-0e85-4a61-a794-e9a852ce77d7/d9a02690-0e85-4a61-a794-e9a852ce77d7.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1056.223915] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03556539-66f9-48b8-ab94-502591e89fd7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.233019] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1056.233019] env[62109]: value = "task-1117113" [ 1056.233019] env[62109]: _type = "Task" [ 1056.233019] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.241764] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117113, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.294274] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.376482] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.472683] env[62109]: DEBUG nova.compute.manager [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1056.474738] env[62109]: DEBUG nova.objects.base [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Object Instance<0f7445fa-c48e-4e79-a01a-1f8f70072de4> lazy-loaded attributes: resources,numa_topology {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1056.495109] env[62109]: DEBUG nova.network.neutron [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Successfully created port: 33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1056.606313] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117112, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598208} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.606830] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 42e26a9c-fdba-4d5c-86b1-146fce2e6b23/42e26a9c-fdba-4d5c-86b1-146fce2e6b23.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1056.607074] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1056.607358] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6db76c2c-5e03-410c-a3ab-102ba36bd0f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.615684] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1056.615684] env[62109]: value = "task-1117115" [ 1056.615684] env[62109]: _type = "Task" [ 1056.615684] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.616690] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.632300] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117115, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.672458] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cde2d66-a182-4944-baea-7feda2c7a1cc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.681673] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afcb6a1c-c679-4325-ad00-60d5a3fab177 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.716539] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e54c32-fafc-4e39-974b-f28289729926 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.724804] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742e7a14-0bc4-404f-8394-f6dcec792001 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.739746] env[62109]: DEBUG nova.compute.provider_tree [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.749462] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117113, 'name': Rename_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.127112] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117115, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07067} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.127404] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1057.128248] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90a61f5-fe28-48fd-b035-58854c8f362e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.148723] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 42e26a9c-fdba-4d5c-86b1-146fce2e6b23/42e26a9c-fdba-4d5c-86b1-146fce2e6b23.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1057.148723] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd04ba73-95fd-4c7a-96e3-4b48ed90d15c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.167868] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1057.167868] env[62109]: value = "task-1117116" [ 1057.167868] env[62109]: _type = "Task" [ 1057.167868] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.175528] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117116, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.245356] env[62109]: DEBUG nova.scheduler.client.report [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1057.252191] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117113, 'name': Rename_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.484378] env[62109]: DEBUG nova.compute.manager [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1057.517288] env[62109]: DEBUG nova.virt.hardware [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1057.517555] env[62109]: DEBUG nova.virt.hardware [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1057.517720] env[62109]: DEBUG nova.virt.hardware [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1057.517907] env[62109]: DEBUG nova.virt.hardware [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1057.518072] env[62109]: DEBUG nova.virt.hardware [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1057.518229] env[62109]: DEBUG nova.virt.hardware [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1057.518441] env[62109]: DEBUG nova.virt.hardware [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1057.518609] env[62109]: DEBUG nova.virt.hardware [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1057.518782] env[62109]: DEBUG nova.virt.hardware [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1057.518950] env[62109]: DEBUG nova.virt.hardware [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1057.519148] env[62109]: DEBUG nova.virt.hardware [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1057.519755] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.520172] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.521987] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d97e1da-826c-4640-b718-0a448a30d913 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.531745] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a97d2f2c-dc4f-4aee-b7a2-09148083e510 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.677954] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117116, 'name': ReconfigVM_Task, 'duration_secs': 0.328912} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.678256] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 42e26a9c-fdba-4d5c-86b1-146fce2e6b23/42e26a9c-fdba-4d5c-86b1-146fce2e6b23.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1057.678851] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8de5c963-2bdf-4b81-8ddc-4cfce0275b53 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.685403] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1057.685403] env[62109]: value = "task-1117117" [ 1057.685403] env[62109]: _type = "Task" [ 1057.685403] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.692850] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117117, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.749255] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117113, 'name': Rename_Task, 'duration_secs': 1.158733} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.749546] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1057.749790] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-157e6e3a-2ba2-491d-9391-28309de06fad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.754151] env[62109]: DEBUG oslo_concurrency.lockutils [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.291s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.757467] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.381s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.758864] env[62109]: INFO nova.compute.claims [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1057.761390] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1057.761390] env[62109]: value = "task-1117118" [ 1057.761390] env[62109]: _type = "Task" [ 1057.761390] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.769456] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117118, 'name': PowerOnVM_Task} progress is 0%. 
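The lockutils entries above record two durations for every named lock: how long the caller waited to acquire it and how long it was held before release. A minimal sketch of that instrumentation using plain threading (the log itself comes from oslo.concurrency; timed_lock and _locks are illustrative names, not its API):

import threading
import time
from contextlib import contextmanager

_locks = {}  # name -> threading.Lock; illustrative global registry

@contextmanager
def timed_lock(name):
    # Report the same two numbers the log lines carry: wait time to
    # acquire and hold time until release.
    lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released :: held {time.monotonic() - acquired:.3f}s')

with timed_lock("compute_resources"):
    pass  # critical section, e.g. a resource-tracker claim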
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.955987] env[62109]: DEBUG nova.compute.manager [req-18d0e633-a194-4c12-b397-20e242ae7887 req-e48ea08b-f7dd-4789-941a-8d797c884eeb service nova] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Received event network-vif-plugged-33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1057.956240] env[62109]: DEBUG oslo_concurrency.lockutils [req-18d0e633-a194-4c12-b397-20e242ae7887 req-e48ea08b-f7dd-4789-941a-8d797c884eeb service nova] Acquiring lock "1f346681-b63f-4587-808c-bde4f0ba5831-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.956519] env[62109]: DEBUG oslo_concurrency.lockutils [req-18d0e633-a194-4c12-b397-20e242ae7887 req-e48ea08b-f7dd-4789-941a-8d797c884eeb service nova] Lock "1f346681-b63f-4587-808c-bde4f0ba5831-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.956642] env[62109]: DEBUG oslo_concurrency.lockutils [req-18d0e633-a194-4c12-b397-20e242ae7887 req-e48ea08b-f7dd-4789-941a-8d797c884eeb service nova] Lock "1f346681-b63f-4587-808c-bde4f0ba5831-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.956819] env[62109]: DEBUG nova.compute.manager [req-18d0e633-a194-4c12-b397-20e242ae7887 req-e48ea08b-f7dd-4789-941a-8d797c884eeb service nova] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] No waiting events found dispatching network-vif-plugged-33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1057.956994] env[62109]: WARNING nova.compute.manager [req-18d0e633-a194-4c12-b397-20e242ae7887 req-e48ea08b-f7dd-4789-941a-8d797c884eeb service nova] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Received unexpected event network-vif-plugged-33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8 for instance with vm_state building and task_state spawning. [ 1058.025749] env[62109]: DEBUG nova.compute.manager [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1058.051667] env[62109]: DEBUG nova.network.neutron [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Successfully updated port: 33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1058.196596] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117117, 'name': Rename_Task, 'duration_secs': 0.140197} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.196920] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1058.197222] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32bd8cb8-1f5d-4b52-b770-0accdb503fcb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.205562] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1058.205562] env[62109]: value = "task-1117119" [ 1058.205562] env[62109]: _type = "Task" [ 1058.205562] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.213896] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117119, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.272109] env[62109]: DEBUG oslo_concurrency.lockutils [None req-00fd25fd-c0ae-4f4d-8454-36a75eeb587a tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.011s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.273350] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.979s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.273550] env[62109]: INFO nova.compute.manager [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Unshelving [ 1058.281508] env[62109]: DEBUG oslo_vmware.api [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117118, 'name': PowerOnVM_Task, 'duration_secs': 0.515329} completed successfully. 
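Several entries in this span follow the same shape: submit a vCenter task (Rename_Task, PowerOnVM_Task, ...), log "Waiting for the task", poll its progress, then log the completed task with its duration_secs. A generic sketch of that poll loop; poll_fn and the state names are assumptions for illustration, not the oslo.vmware session API:

import time

def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
    # poll_fn() is assumed to return (state, progress) with state in
    # {"running", "success", "error"}; the loop mirrors the
    # "progress is N%" / "completed successfully" entries above.
    start = time.monotonic()
    while time.monotonic() - start < timeout:
        state, progress = poll_fn()
        if state == "success":
            return time.monotonic() - start  # analogous to duration_secs
        if state == "error":
            raise RuntimeError("task failed")
        print(f"progress is {progress}%")
        time.sleep(interval)
    raise TimeoutError("task did not complete within %.0fs" % timeout)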
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.282313] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1058.282531] env[62109]: INFO nova.compute.manager [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Took 8.99 seconds to spawn the instance on the hypervisor. [ 1058.282723] env[62109]: DEBUG nova.compute.manager [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1058.283592] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbe711b-8f0f-4520-aa49-e08c4245c48f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.550489] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.555806] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Acquiring lock "refresh_cache-1f346681-b63f-4587-808c-bde4f0ba5831" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.555806] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Acquired lock "refresh_cache-1f346681-b63f-4587-808c-bde4f0ba5831" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.555806] env[62109]: DEBUG nova.network.neutron [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1058.717320] env[62109]: DEBUG oslo_vmware.api [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117119, 'name': PowerOnVM_Task, 'duration_secs': 0.458801} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.717608] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1058.717820] env[62109]: INFO nova.compute.manager [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Took 5.37 seconds to spawn the instance on the hypervisor. [ 1058.718007] env[62109]: DEBUG nova.compute.manager [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1058.718798] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d52aba-28ae-499a-b7fc-fa8aa99b2d2c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.803034] env[62109]: INFO nova.compute.manager [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Took 16.71 seconds to build instance. [ 1058.911527] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab6f83b-14c2-4fff-a439-b22d3caf6134 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.919303] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7763dd80-d7ac-48a1-a7b3-0e5fb3fc5683 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.949165] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e51460-03dd-4131-8325-dcf17500552a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.957632] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daebfd7e-d7b5-41de-b967-3f702ccb3dd0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.970889] env[62109]: DEBUG nova.compute.provider_tree [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1059.091197] env[62109]: DEBUG nova.network.neutron [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1059.240783] env[62109]: INFO nova.compute.manager [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Took 10.73 seconds to build instance. [ 1059.252554] env[62109]: DEBUG nova.network.neutron [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Updating instance_info_cache with network_info: [{"id": "33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8", "address": "fa:16:3e:ea:6f:90", "network": {"id": "f23595bc-4097-495b-bb87-d0a454417b23", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1705023301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bffb55e6de89486f8d6ef9160d2e8bfb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33b7f1b2-08", "ovs_interfaceid": "33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.296903] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.305091] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a3a70c7-3aac-4a5f-adb9-6b3985878aa2 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Lock "d9a02690-0e85-4a61-a794-e9a852ce77d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.224s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.475188] env[62109]: DEBUG nova.scheduler.client.report [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1059.569971] env[62109]: INFO nova.compute.manager [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Rescuing [ 1059.570489] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquiring lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1059.570489] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquired lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.570489] env[62109]: DEBUG nova.network.neutron [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1059.742910] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b4f3c0db-7775-405d-b0a9-cd85aaee0016 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Lock "42e26a9c-fdba-4d5c-86b1-146fce2e6b23" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.245s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.754026] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Releasing lock "refresh_cache-1f346681-b63f-4587-808c-bde4f0ba5831" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.754360] env[62109]: DEBUG nova.compute.manager [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Instance network_info: |[{"id": "33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8", "address": "fa:16:3e:ea:6f:90", "network": {"id": "f23595bc-4097-495b-bb87-d0a454417b23", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1705023301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bffb55e6de89486f8d6ef9160d2e8bfb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": 
"nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33b7f1b2-08", "ovs_interfaceid": "33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1059.754792] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:6f:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '274afb4c-04df-4213-8ad2-8f48a10d78a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1059.762373] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Creating folder: Project (bffb55e6de89486f8d6ef9160d2e8bfb). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1059.762994] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-768f35b1-6993-4e94-b89b-7a055f283091 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.775272] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Created folder: Project (bffb55e6de89486f8d6ef9160d2e8bfb) in parent group-v244329. [ 1059.775472] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Creating folder: Instances. Parent ref: group-v244537. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1059.775723] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dbe4c4bb-e1cb-4fd5-b304-0562215fec06 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.786140] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Created folder: Instances in parent group-v244537. [ 1059.786395] env[62109]: DEBUG oslo.service.loopingcall [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1059.786595] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1059.786804] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5dda7d1-9d50-40c4-8d12-3d3b3f91308b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.807447] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1059.807447] env[62109]: value = "task-1117122" [ 1059.807447] env[62109]: _type = "Task" [ 1059.807447] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.815832] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117122, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.980344] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.223s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.981068] env[62109]: DEBUG nova.compute.manager [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1059.984658] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 3.368s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.987031] env[62109]: DEBUG nova.compute.manager [req-82c743ff-d591-4e42-ad7c-f53c1511a97f req-90cd7a2b-575d-4646-a67d-ae97a7cc6751 service nova] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Received event network-changed-33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1059.987102] env[62109]: DEBUG nova.compute.manager [req-82c743ff-d591-4e42-ad7c-f53c1511a97f req-90cd7a2b-575d-4646-a67d-ae97a7cc6751 service nova] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Refreshing instance network info cache due to event network-changed-33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8. 
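The instance_info_cache updates and cache refreshes above log the full network_info structure for each port. A short sketch of pulling out the fields most often needed from it (port id, MAC, fixed IPs); the JSON below is a trimmed copy of the logged entry, keeping only the keys the sketch touches:

import json

network_info_json = """
[{"id": "33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8",
  "address": "fa:16:3e:ea:6f:90",
  "devname": "tap33b7f1b2-08",
  "network": {"label": "tempest-InstanceActionsNegativeTestJSON-1705023301-network",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.3"}]}]}}]
"""

def summarize_vifs(network_info):
    # Flatten the nested structure into (port_id, mac, fixed_ips) rows.
    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        yield vif["id"], vif["address"], ips

for port_id, mac, ips in summarize_vifs(json.loads(network_info_json)):
    print(port_id, mac, ips)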
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1059.987589] env[62109]: DEBUG oslo_concurrency.lockutils [req-82c743ff-d591-4e42-ad7c-f53c1511a97f req-90cd7a2b-575d-4646-a67d-ae97a7cc6751 service nova] Acquiring lock "refresh_cache-1f346681-b63f-4587-808c-bde4f0ba5831" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1059.987589] env[62109]: DEBUG oslo_concurrency.lockutils [req-82c743ff-d591-4e42-ad7c-f53c1511a97f req-90cd7a2b-575d-4646-a67d-ae97a7cc6751 service nova] Acquired lock "refresh_cache-1f346681-b63f-4587-808c-bde4f0ba5831" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.987691] env[62109]: DEBUG nova.network.neutron [req-82c743ff-d591-4e42-ad7c-f53c1511a97f req-90cd7a2b-575d-4646-a67d-ae97a7cc6751 service nova] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Refreshing network info cache for port 33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1060.115626] env[62109]: INFO nova.compute.manager [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Rebuilding instance [ 1060.161983] env[62109]: DEBUG nova.compute.manager [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1060.163126] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee33b35a-4de6-4f4b-9232-3a24c229a5bc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.176082] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.176330] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.261067] env[62109]: DEBUG nova.network.neutron [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Updating instance_info_cache with network_info: [{"id": "953462ff-1e7c-459f-aef0-b8cd1de48900", "address": "fa:16:3e:f5:c9:21", "network": {"id": "b4705a23-aa69-4f1d-ab3d-220ae9042d81", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1020451027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "602e84ee01de44dabeb7c20fdbcb5795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap953462ff-1e", "ovs_interfaceid": "953462ff-1e7c-459f-aef0-b8cd1de48900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.318201] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117122, 'name': CreateVM_Task, 'duration_secs': 0.332291} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.318505] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1060.319257] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1060.319430] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.319765] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1060.320039] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f584b5b-12be-4c0c-832a-26a9633766eb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.325150] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Waiting for the task: (returnval){ [ 1060.325150] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5249c20d-9856-4235-eaf0-24b0d407918e" [ 1060.325150] env[62109]: _type = "Task" [ 1060.325150] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.333584] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5249c20d-9856-4235-eaf0-24b0d407918e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.490775] env[62109]: DEBUG nova.compute.utils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1060.494285] env[62109]: INFO nova.compute.claims [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1060.499677] env[62109]: DEBUG nova.compute.manager [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1060.499850] env[62109]: DEBUG nova.network.neutron [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1060.578108] env[62109]: DEBUG nova.policy [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b39ff10ac8bd4e4abf04fd881e5125ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9e5867b8b7e4ed18c5395baf46db66f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1060.674948] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1060.675274] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70dade31-4ff3-4b07-9006-450958a70bff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.678922] env[62109]: DEBUG nova.compute.manager [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1060.683661] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1060.683661] env[62109]: value = "task-1117123" [ 1060.683661] env[62109]: _type = "Task" [ 1060.683661] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.693770] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117123, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.764074] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Releasing lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.821672] env[62109]: DEBUG nova.network.neutron [req-82c743ff-d591-4e42-ad7c-f53c1511a97f req-90cd7a2b-575d-4646-a67d-ae97a7cc6751 service nova] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Updated VIF entry in instance network info cache for port 33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1060.822117] env[62109]: DEBUG nova.network.neutron [req-82c743ff-d591-4e42-ad7c-f53c1511a97f req-90cd7a2b-575d-4646-a67d-ae97a7cc6751 service nova] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Updating instance_info_cache with network_info: [{"id": "33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8", "address": "fa:16:3e:ea:6f:90", "network": {"id": "f23595bc-4097-495b-bb87-d0a454417b23", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1705023301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bffb55e6de89486f8d6ef9160d2e8bfb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33b7f1b2-08", "ovs_interfaceid": "33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.837746] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5249c20d-9856-4235-eaf0-24b0d407918e, 'name': SearchDatastore_Task, 
'duration_secs': 0.010905} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.838093] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.838630] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1060.838739] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1060.839198] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.839198] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1060.839477] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bdb436e9-ddb1-4c1d-a0d1-954bf3672217 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.855853] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1060.856115] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1060.856933] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a08eb882-3b65-4f7f-a19f-43ae6e0907cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.863622] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Waiting for the task: (returnval){ [ 1060.863622] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527be97d-1420-c547-c7d1-ab9a742e6e08" [ 1060.863622] env[62109]: _type = "Task" [ 1060.863622] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.872376] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527be97d-1420-c547-c7d1-ab9a742e6e08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.958036] env[62109]: DEBUG nova.network.neutron [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Successfully created port: 13c402cb-03d3-437a-b0c0-ef0bb1f76185 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1060.999450] env[62109]: DEBUG nova.compute.manager [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1061.004934] env[62109]: INFO nova.compute.resource_tracker [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating resource usage from migration 96835ba5-87a2-466a-a8f6-769d3f04e0cc [ 1061.201085] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117123, 'name': PowerOffVM_Task} progress is 0%. 
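The image-cache entries above name files with datastore paths of the form "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk". A small helper for splitting such a string when reading these logs; it is not the vmwareapi driver's own parser:

import re

_DS_PATH = re.compile(r"^\[(?P<datastore>[^\]]+)\]\s*(?P<path>.*)$")

def split_datastore_path(ds_path):
    # "[datastore1] a/b.vmdk" -> ("datastore1", "a/b.vmdk")
    match = _DS_PATH.match(ds_path)
    if not match:
        raise ValueError(f"not a datastore path: {ds_path!r}")
    return match.group("datastore"), match.group("path")

print(split_datastore_path(
    "[datastore1] devstack-image-cache_base/"
    "6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk"))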
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.205994] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.241522] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e855888-0dd6-44dd-9f48-f1ed58a4b222 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.249540] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d536df3-6573-4cd3-bfe0-ffcf737801d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.281666] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73358fa8-1345-4af7-bb58-664333d1c9f9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.290067] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5953063-6a7b-46a1-afe5-326e9032abbc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.296678] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1061.296929] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6affb83f-bd80-47bc-9407-f9e3a9290719 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.306563] env[62109]: DEBUG nova.compute.provider_tree [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.314669] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1061.314669] env[62109]: value = "task-1117124" [ 1061.314669] env[62109]: _type = "Task" [ 1061.314669] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.326411] env[62109]: DEBUG oslo_concurrency.lockutils [req-82c743ff-d591-4e42-ad7c-f53c1511a97f req-90cd7a2b-575d-4646-a67d-ae97a7cc6751 service nova] Releasing lock "refresh_cache-1f346681-b63f-4587-808c-bde4f0ba5831" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1061.326932] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117124, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.376256] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527be97d-1420-c547-c7d1-ab9a742e6e08, 'name': SearchDatastore_Task, 'duration_secs': 0.011097} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.377259] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06350aab-4ef2-4772-a042-d6974287d4c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.383944] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Waiting for the task: (returnval){ [ 1061.383944] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522a590c-03f2-1943-63c2-df81769326b8" [ 1061.383944] env[62109]: _type = "Task" [ 1061.383944] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.393031] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522a590c-03f2-1943-63c2-df81769326b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.696211] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117123, 'name': PowerOffVM_Task, 'duration_secs': 0.937786} completed successfully. 
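The scheduler report-client entries in this span (report.py:954) list the provider inventory: totals, reserved amounts and allocation ratios for VCPU, MEMORY_MB and DISK_GB. A minimal sketch of the schedulable capacity those numbers imply, using the usual placement-style formula (total - reserved) * allocation_ratio; step sizes and rounding are ignored here:

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def effective_capacity(inv):
    # Capacity a scheduler could place against, per resource class.
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
print(effective_capacity(inventory))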
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.696596] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1061.696718] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1061.697498] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc726827-b865-4114-940b-5c3a590a56cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.706690] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1061.706954] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbbd49d2-e2b8-4189-aff9-e01e1071eeda {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.737049] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1061.737271] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1061.737460] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Deleting the datastore file [datastore1] 42e26a9c-fdba-4d5c-86b1-146fce2e6b23 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1061.737749] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5ac9290-b067-4a4b-97f2-c91c1dbf795f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.744920] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1061.744920] env[62109]: value = "task-1117126" [ 1061.744920] env[62109]: _type = "Task" [ 1061.744920] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.753716] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117126, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.810117] env[62109]: DEBUG nova.scheduler.client.report [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1061.827397] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117124, 'name': PowerOffVM_Task, 'duration_secs': 0.219797} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.827854] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1061.828668] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0420a6d9-62f2-4efd-b57d-d4e74004eb20 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.849877] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7020f5c2-6d1c-4d33-85cb-d0f71e43c8cc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.889964] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1061.890338] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0b7d227-7e10-48ae-8d5a-37d1acf67843 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.899038] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522a590c-03f2-1943-63c2-df81769326b8, 'name': SearchDatastore_Task, 
'duration_secs': 0.011731} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.899486] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1061.899758] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 1f346681-b63f-4587-808c-bde4f0ba5831/1f346681-b63f-4587-808c-bde4f0ba5831.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1061.900079] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1061.900079] env[62109]: value = "task-1117127" [ 1061.900079] env[62109]: _type = "Task" [ 1061.900079] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.900317] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75bff48b-55ac-494d-9a4e-5c03a7f4f8b6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.911030] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1061.911227] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1061.911497] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.911626] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.911808] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1061.912052] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-171e83ec-04eb-4f79-ab12-d4187fefc0d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.915798] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Waiting for the task: (returnval){ [ 1061.915798] env[62109]: value = "task-1117128" [ 1061.915798] env[62109]: _type = "Task" [ 1061.915798] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.921551] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1061.921734] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1061.925700] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac9a5027-edc6-4be4-a6a9-ac918616b61b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.928143] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117128, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.931871] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1061.931871] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b9e18c-7bea-1fcc-0556-b0ba0e1a36d1" [ 1061.931871] env[62109]: _type = "Task" [ 1061.931871] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.939746] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b9e18c-7bea-1fcc-0556-b0ba0e1a36d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.016774] env[62109]: DEBUG nova.compute.manager [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1062.039202] env[62109]: DEBUG nova.virt.hardware [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1062.039483] env[62109]: DEBUG nova.virt.hardware [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1062.039649] env[62109]: DEBUG nova.virt.hardware [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1062.039836] env[62109]: DEBUG nova.virt.hardware [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1062.040010] env[62109]: DEBUG nova.virt.hardware [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1062.040175] env[62109]: DEBUG nova.virt.hardware [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1062.040469] env[62109]: DEBUG nova.virt.hardware [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1062.040697] env[62109]: DEBUG nova.virt.hardware [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1062.040893] env[62109]: DEBUG nova.virt.hardware [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1062.041079] env[62109]: DEBUG nova.virt.hardware [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1062.041277] env[62109]: DEBUG nova.virt.hardware [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1062.042237] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80a04bd-54a9-42d8-b6bd-27118386945c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.052341] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea4e708-764d-4f2e-a5b7-c0f60329f27c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.258583] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117126, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.463056} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.258583] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1062.258583] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1062.258583] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1062.314724] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.330s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.314985] env[62109]: INFO nova.compute.manager [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Migrating [ 1062.322115] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.772s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.323768] env[62109]: INFO nova.compute.claims [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1062.408370] env[62109]: DEBUG nova.compute.manager [req-6e49796c-4b79-40a9-83a7-7354ca99435f req-8ace9ea2-929e-4da4-992a-8da1e6c3c58f service nova] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Received event network-vif-plugged-13c402cb-03d3-437a-b0c0-ef0bb1f76185 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1062.408524] env[62109]: DEBUG oslo_concurrency.lockutils [req-6e49796c-4b79-40a9-83a7-7354ca99435f req-8ace9ea2-929e-4da4-992a-8da1e6c3c58f service nova] Acquiring lock "ad3d92ec-29a9-4f01-8117-47352c244e1e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.408741] env[62109]: DEBUG oslo_concurrency.lockutils [req-6e49796c-4b79-40a9-83a7-7354ca99435f req-8ace9ea2-929e-4da4-992a-8da1e6c3c58f service nova] Lock "ad3d92ec-29a9-4f01-8117-47352c244e1e-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.408919] env[62109]: DEBUG oslo_concurrency.lockutils [req-6e49796c-4b79-40a9-83a7-7354ca99435f req-8ace9ea2-929e-4da4-992a-8da1e6c3c58f service nova] Lock "ad3d92ec-29a9-4f01-8117-47352c244e1e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.409107] env[62109]: DEBUG nova.compute.manager [req-6e49796c-4b79-40a9-83a7-7354ca99435f req-8ace9ea2-929e-4da4-992a-8da1e6c3c58f service nova] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] No waiting events found dispatching network-vif-plugged-13c402cb-03d3-437a-b0c0-ef0bb1f76185 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1062.409283] env[62109]: WARNING nova.compute.manager [req-6e49796c-4b79-40a9-83a7-7354ca99435f req-8ace9ea2-929e-4da4-992a-8da1e6c3c58f service nova] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Received unexpected event network-vif-plugged-13c402cb-03d3-437a-b0c0-ef0bb1f76185 for instance with vm_state building and task_state spawning. [ 1062.427204] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117128, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.444430] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b9e18c-7bea-1fcc-0556-b0ba0e1a36d1, 'name': SearchDatastore_Task, 'duration_secs': 0.042387} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.445160] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a327fc0b-157b-464e-a610-c253380cabd6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.451110] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1062.451110] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524250ac-f7db-7209-a1f4-7746d7a88eba" [ 1062.451110] env[62109]: _type = "Task" [ 1062.451110] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.461233] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524250ac-f7db-7209-a1f4-7746d7a88eba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.511732] env[62109]: DEBUG nova.network.neutron [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Successfully updated port: 13c402cb-03d3-437a-b0c0-ef0bb1f76185 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1062.836861] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.837228] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.837228] env[62109]: DEBUG nova.network.neutron [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1062.927090] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117128, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.797934} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.927395] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 1f346681-b63f-4587-808c-bde4f0ba5831/1f346681-b63f-4587-808c-bde4f0ba5831.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1062.927626] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1062.927911] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76931959-b34c-43cb-beeb-1d8009679c78 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.936414] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Waiting for the task: (returnval){ [ 1062.936414] env[62109]: value = "task-1117129" [ 1062.936414] env[62109]: _type = "Task" [ 1062.936414] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.945915] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117129, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.963041] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524250ac-f7db-7209-a1f4-7746d7a88eba, 'name': SearchDatastore_Task, 'duration_secs': 0.027267} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.963041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.963041] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] d9a02690-0e85-4a61-a794-e9a852ce77d7/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk. {{(pid=62109) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1062.963349] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3873ced1-ad68-45a3-be9f-bfc70c0c6c51 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.971293] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1062.971293] env[62109]: value = "task-1117130" [ 1062.971293] env[62109]: _type = "Task" [ 1062.971293] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.981672] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117130, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.015730] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-ad3d92ec-29a9-4f01-8117-47352c244e1e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.015863] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-ad3d92ec-29a9-4f01-8117-47352c244e1e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.016014] env[62109]: DEBUG nova.network.neutron [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1063.300823] env[62109]: DEBUG nova.virt.hardware [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1063.301154] env[62109]: DEBUG nova.virt.hardware [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1063.301370] env[62109]: DEBUG nova.virt.hardware [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1063.301604] env[62109]: DEBUG nova.virt.hardware [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1063.302045] env[62109]: DEBUG nova.virt.hardware [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1063.302262] env[62109]: DEBUG nova.virt.hardware [None 
req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1063.302503] env[62109]: DEBUG nova.virt.hardware [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1063.302708] env[62109]: DEBUG nova.virt.hardware [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1063.302952] env[62109]: DEBUG nova.virt.hardware [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1063.303175] env[62109]: DEBUG nova.virt.hardware [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1063.303363] env[62109]: DEBUG nova.virt.hardware [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1063.304397] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce39494-0c63-45e5-a71f-422f39f65e26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.314352] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51de103d-7273-4a8c-8d44-924aff1f430d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.331068] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1063.338798] env[62109]: DEBUG oslo.service.loopingcall [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1063.342386] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1063.345166] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-daabadf2-8d82-4ded-a7f2-361e39c31b8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.366949] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1063.366949] env[62109]: value = "task-1117131" [ 1063.366949] env[62109]: _type = "Task" [ 1063.366949] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.381017] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117131, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.451086] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117129, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070091} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.451472] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1063.452274] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d76d21-0523-4585-8da2-8d7ab2fab039 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.485376] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 1f346681-b63f-4587-808c-bde4f0ba5831/1f346681-b63f-4587-808c-bde4f0ba5831.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1063.493732] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55999f31-d8e8-4f08-832b-ba21e491c3fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.514401] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117130, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.515910] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Waiting for the task: (returnval){ [ 1063.515910] env[62109]: value = "task-1117132" [ 1063.515910] env[62109]: _type = "Task" [ 1063.515910] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.529223] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117132, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.569907] env[62109]: DEBUG nova.network.neutron [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1063.585436] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48180717-e2ab-4819-a2dc-b99548a12101 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.595629] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133214b0-f4e8-41fc-97f1-c71adc3f2692 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.628777] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e78731-68f2-46f9-b06d-11a50bda6a7d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.639945] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d25b05-1df7-4bea-8281-0830baac4b9b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.657239] env[62109]: DEBUG nova.compute.provider_tree [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.723822] env[62109]: DEBUG nova.network.neutron [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance_info_cache with network_info: [{"id": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "address": "fa:16:3e:9b:4f:08", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": 
{}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b33420f-fd", "ovs_interfaceid": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.757937] env[62109]: DEBUG nova.network.neutron [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Updating instance_info_cache with network_info: [{"id": "13c402cb-03d3-437a-b0c0-ef0bb1f76185", "address": "fa:16:3e:b4:b4:c7", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13c402cb-03", "ovs_interfaceid": "13c402cb-03d3-437a-b0c0-ef0bb1f76185", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.880932] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117131, 'name': CreateVM_Task, 'duration_secs': 0.336819} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.881180] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1063.881990] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.882217] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.882574] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1063.882870] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50913f6e-0ff4-4a90-800d-b887db8fde8a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.888357] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1063.888357] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52307ff9-5b4d-f404-5c39-0a207c40e1f2" [ 1063.888357] env[62109]: _type = "Task" [ 1063.888357] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.896934] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52307ff9-5b4d-f404-5c39-0a207c40e1f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.984150] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117130, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544869} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.984497] env[62109]: INFO nova.virt.vmwareapi.ds_util [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] d9a02690-0e85-4a61-a794-e9a852ce77d7/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk. [ 1063.985300] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a416734b-fe2d-424d-89f3-ed39a8e99490 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.010206] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] d9a02690-0e85-4a61-a794-e9a852ce77d7/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1064.010594] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c50bbadb-d4fb-435f-9f1f-58c6d82332f5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.034096] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117132, 'name': ReconfigVM_Task, 'duration_secs': 0.301608} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.035777] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 1f346681-b63f-4587-808c-bde4f0ba5831/1f346681-b63f-4587-808c-bde4f0ba5831.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1064.036885] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1064.036885] env[62109]: value = "task-1117133" [ 1064.036885] env[62109]: _type = "Task" [ 1064.036885] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.037139] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d18b291-116e-4e87-8f8a-538200e15e66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.047943] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117133, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.049501] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Waiting for the task: (returnval){ [ 1064.049501] env[62109]: value = "task-1117134" [ 1064.049501] env[62109]: _type = "Task" [ 1064.049501] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.064770] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117134, 'name': Rename_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.164134] env[62109]: DEBUG nova.scheduler.client.report [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1064.228323] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.260724] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-ad3d92ec-29a9-4f01-8117-47352c244e1e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.261078] env[62109]: DEBUG nova.compute.manager [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Instance network_info: |[{"id": "13c402cb-03d3-437a-b0c0-ef0bb1f76185", "address": "fa:16:3e:b4:b4:c7", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", 
"label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13c402cb-03", "ovs_interfaceid": "13c402cb-03d3-437a-b0c0-ef0bb1f76185", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1064.261604] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:b4:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61b8f0db-488e-42d7-bf6c-6c1665cd5616', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13c402cb-03d3-437a-b0c0-ef0bb1f76185', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1064.269257] env[62109]: DEBUG oslo.service.loopingcall [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1064.269491] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1064.269725] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d374b7ec-ec08-4a78-9fc1-a846b49ad93f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.290710] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1064.290710] env[62109]: value = "task-1117135" [ 1064.290710] env[62109]: _type = "Task" [ 1064.290710] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.298770] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117135, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.398701] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52307ff9-5b4d-f404-5c39-0a207c40e1f2, 'name': SearchDatastore_Task, 'duration_secs': 0.026514} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.398960] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.399217] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1064.399457] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.399614] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.399795] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1064.400074] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51021d97-34bc-4a0b-b406-7894e0575ff8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.408188] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1064.408409] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1064.409107] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b43084f-7539-443b-9f49-852571c9c9ea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.414418] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1064.414418] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fb10f0-e3e3-3709-bc1f-c8fb0fa09f2a" [ 1064.414418] env[62109]: _type = "Task" [ 1064.414418] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.422906] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fb10f0-e3e3-3709-bc1f-c8fb0fa09f2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.432946] env[62109]: DEBUG nova.compute.manager [req-31526831-012d-4c0d-9f76-4fb1d038bb97 req-51f5ad14-5bce-406f-b824-a95bf48cacdf service nova] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Received event network-changed-13c402cb-03d3-437a-b0c0-ef0bb1f76185 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1064.433158] env[62109]: DEBUG nova.compute.manager [req-31526831-012d-4c0d-9f76-4fb1d038bb97 req-51f5ad14-5bce-406f-b824-a95bf48cacdf service nova] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Refreshing instance network info cache due to event network-changed-13c402cb-03d3-437a-b0c0-ef0bb1f76185. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1064.433378] env[62109]: DEBUG oslo_concurrency.lockutils [req-31526831-012d-4c0d-9f76-4fb1d038bb97 req-51f5ad14-5bce-406f-b824-a95bf48cacdf service nova] Acquiring lock "refresh_cache-ad3d92ec-29a9-4f01-8117-47352c244e1e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.433548] env[62109]: DEBUG oslo_concurrency.lockutils [req-31526831-012d-4c0d-9f76-4fb1d038bb97 req-51f5ad14-5bce-406f-b824-a95bf48cacdf service nova] Acquired lock "refresh_cache-ad3d92ec-29a9-4f01-8117-47352c244e1e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.433685] env[62109]: DEBUG nova.network.neutron [req-31526831-012d-4c0d-9f76-4fb1d038bb97 req-51f5ad14-5bce-406f-b824-a95bf48cacdf service nova] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Refreshing network info cache for port 13c402cb-03d3-437a-b0c0-ef0bb1f76185 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1064.548592] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117133, 'name': ReconfigVM_Task, 'duration_secs': 0.336147} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.548985] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Reconfigured VM instance instance-00000062 to attach disk [datastore2] d9a02690-0e85-4a61-a794-e9a852ce77d7/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8-rescue.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1064.549712] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df47b63-9343-40aa-81bf-8774680a9372 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.581431] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-950b49ba-95f4-4fd0-a851-55bfc6d12f53 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.591282] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117134, 'name': Rename_Task, 'duration_secs': 0.155875} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.591603] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1064.592283] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e3153142-daae-4dc7-adbb-e7df54d9feac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.598612] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1064.598612] env[62109]: value = "task-1117136" [ 1064.598612] env[62109]: _type = "Task" [ 1064.598612] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.604760] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Waiting for the task: (returnval){ [ 1064.604760] env[62109]: value = "task-1117137" [ 1064.604760] env[62109]: _type = "Task" [ 1064.604760] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.612018] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117136, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.614928] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117137, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.669354] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.347s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.669991] env[62109]: DEBUG nova.compute.manager [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1064.673074] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.376s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.673300] env[62109]: DEBUG nova.objects.instance [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lazy-loading 'pci_requests' on Instance uuid 0f7445fa-c48e-4e79-a01a-1f8f70072de4 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.801367] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117135, 'name': CreateVM_Task, 'duration_secs': 0.390728} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.801570] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1064.802280] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.802471] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.802826] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1064.803087] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a057c1ac-bb1f-446c-8c68-224506ea8df2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.807987] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1064.807987] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e1fd19-e8d7-7cc7-ba8d-13b7fdbfb760" [ 1064.807987] env[62109]: _type = "Task" [ 1064.807987] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.815651] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e1fd19-e8d7-7cc7-ba8d-13b7fdbfb760, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.925913] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fb10f0-e3e3-3709-bc1f-c8fb0fa09f2a, 'name': SearchDatastore_Task, 'duration_secs': 0.011149} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.926833] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-902b7c98-81ab-4193-96f7-ced6d00f5ae5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.933607] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1064.933607] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52086392-03c1-85c1-605c-b0118534b3c3" [ 1064.933607] env[62109]: _type = "Task" [ 1064.933607] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.943812] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52086392-03c1-85c1-605c-b0118534b3c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.118922] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117136, 'name': ReconfigVM_Task, 'duration_secs': 0.322249} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.119187] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117137, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.119451] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1065.119711] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92e5385b-b16c-42ba-8324-2a08799d3512 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.127504] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1065.127504] env[62109]: value = "task-1117138" [ 1065.127504] env[62109]: _type = "Task" [ 1065.127504] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.138072] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117138, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.141840] env[62109]: DEBUG nova.network.neutron [req-31526831-012d-4c0d-9f76-4fb1d038bb97 req-51f5ad14-5bce-406f-b824-a95bf48cacdf service nova] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Updated VIF entry in instance network info cache for port 13c402cb-03d3-437a-b0c0-ef0bb1f76185. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1065.142370] env[62109]: DEBUG nova.network.neutron [req-31526831-012d-4c0d-9f76-4fb1d038bb97 req-51f5ad14-5bce-406f-b824-a95bf48cacdf service nova] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Updating instance_info_cache with network_info: [{"id": "13c402cb-03d3-437a-b0c0-ef0bb1f76185", "address": "fa:16:3e:b4:b4:c7", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13c402cb-03", "ovs_interfaceid": "13c402cb-03d3-437a-b0c0-ef0bb1f76185", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.176336] env[62109]: DEBUG nova.compute.utils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1065.181216] env[62109]: DEBUG nova.objects.instance [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lazy-loading 'numa_topology' on Instance uuid 0f7445fa-c48e-4e79-a01a-1f8f70072de4 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1065.182249] env[62109]: DEBUG nova.compute.manager [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1065.182566] env[62109]: DEBUG nova.network.neutron [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1065.224341] env[62109]: DEBUG nova.policy [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bba19358571a47f3baafbc9662845961', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '430353b9a427408494b462b49f11354a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1065.320583] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e1fd19-e8d7-7cc7-ba8d-13b7fdbfb760, 'name': SearchDatastore_Task, 'duration_secs': 0.035419} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.320970] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1065.321240] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1065.321512] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.445940] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52086392-03c1-85c1-605c-b0118534b3c3, 'name': SearchDatastore_Task, 'duration_secs': 0.0108} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.446260] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1065.446492] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 42e26a9c-fdba-4d5c-86b1-146fce2e6b23/42e26a9c-fdba-4d5c-86b1-146fce2e6b23.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1065.446769] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.446959] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1065.447191] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3c3a5ff-ba54-4a11-87d4-8060d32b59ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.449519] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b50dd9f-cc9d-4424-9b7d-033454efb9ee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.181233] env[62109]: DEBUG nova.network.neutron [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Successfully created port: a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1066.184254] env[62109]: DEBUG oslo_concurrency.lockutils [req-31526831-012d-4c0d-9f76-4fb1d038bb97 req-51f5ad14-5bce-406f-b824-a95bf48cacdf service nova] Releasing lock "refresh_cache-ad3d92ec-29a9-4f01-8117-47352c244e1e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1066.184600] env[62109]: DEBUG nova.compute.manager [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1066.187248] env[62109]: INFO nova.compute.claims [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1066.200801] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1066.200801] env[62109]: value = "task-1117139" [ 1066.200801] env[62109]: _type = "Task" [ 1066.200801] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.201830] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3a028c-1460-48dc-8d5d-19a1b1cf74c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.206383] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1066.206614] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1066.210357] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5256b82-b6ea-49c3-87ea-cdb388fd8ebc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.233644] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance 'bc75898d-7856-4ecb-9640-ec30538fe90f' progress to 0 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1066.237609] env[62109]: DEBUG oslo_vmware.api [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117138, 'name': PowerOnVM_Task, 'duration_secs': 0.450114} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.237609] env[62109]: DEBUG oslo_vmware.api [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117137, 'name': PowerOnVM_Task, 'duration_secs': 0.612665} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.238998] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1066.240975] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1066.241077] env[62109]: INFO nova.compute.manager [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Took 8.76 seconds to spawn the instance on the hypervisor. [ 1066.241239] env[62109]: DEBUG nova.compute.manager [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1066.242342] env[62109]: DEBUG nova.compute.manager [None req-efa37faa-5f77-45c5-ad02-eb7ad034854f tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1066.247216] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9b85fd-982c-46fa-90c2-468868da6feb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.250249] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120a9cd9-711e-4832-86ff-bdce3c441f65 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.253605] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117139, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.253767] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1066.253767] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52281c02-47d9-e167-0f6b-5b592a895cdc" [ 1066.253767] env[62109]: _type = "Task" [ 1066.253767] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.273169] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52281c02-47d9-e167-0f6b-5b592a895cdc, 'name': SearchDatastore_Task, 'duration_secs': 0.011438} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.273912] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28ad8bbd-ce9a-4dad-ae95-732efe6d1954 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.283019] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1066.283019] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5209b9f5-91fe-0d38-5a10-5de0cba2bb7c" [ 1066.283019] env[62109]: _type = "Task" [ 1066.283019] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.288374] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5209b9f5-91fe-0d38-5a10-5de0cba2bb7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.721713] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117139, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.740582] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1066.741264] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7043144b-127f-43b3-a1ec-00266f0242ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.752519] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1066.752519] env[62109]: value = "task-1117140" [ 1066.752519] env[62109]: _type = "Task" [ 1066.752519] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.770162] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117140, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.779876] env[62109]: INFO nova.compute.manager [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Took 13.61 seconds to build instance. [ 1066.792423] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5209b9f5-91fe-0d38-5a10-5de0cba2bb7c, 'name': SearchDatastore_Task, 'duration_secs': 0.010264} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.794478] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1066.794478] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] ad3d92ec-29a9-4f01-8117-47352c244e1e/ad3d92ec-29a9-4f01-8117-47352c244e1e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1066.794478] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45626b39-322a-4180-8f6b-a0d8d9303329 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.804191] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1066.804191] env[62109]: value = "task-1117141" [ 1066.804191] env[62109]: _type = "Task" [ 1066.804191] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.815643] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117141, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.100657] env[62109]: DEBUG oslo_concurrency.lockutils [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Acquiring lock "1f346681-b63f-4587-808c-bde4f0ba5831" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.197799] env[62109]: DEBUG nova.compute.manager [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1067.222052] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117139, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.721868} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.222223] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 42e26a9c-fdba-4d5c-86b1-146fce2e6b23/42e26a9c-fdba-4d5c-86b1-146fce2e6b23.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1067.222393] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1067.224537] env[62109]: DEBUG nova.virt.hardware [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1067.224831] env[62109]: DEBUG nova.virt.hardware [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Flavor limits 0:0:0 
{{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1067.225059] env[62109]: DEBUG nova.virt.hardware [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1067.225305] env[62109]: DEBUG nova.virt.hardware [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1067.225494] env[62109]: DEBUG nova.virt.hardware [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1067.225689] env[62109]: DEBUG nova.virt.hardware [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1067.225926] env[62109]: DEBUG nova.virt.hardware [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1067.226140] env[62109]: DEBUG nova.virt.hardware [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1067.226315] env[62109]: DEBUG nova.virt.hardware [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1067.226487] env[62109]: DEBUG nova.virt.hardware [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1067.226664] env[62109]: DEBUG nova.virt.hardware [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1067.226932] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48d94264-cd65-4170-8278-63dba74e05b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.229358] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d7148b-9ebc-4359-a346-24d12e487cda {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.240198] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26033dbf-2be6-47ca-b080-fc2bf51e2d13 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.244960] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1067.244960] env[62109]: value = "task-1117142" [ 1067.244960] env[62109]: _type = "Task" [ 1067.244960] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.266033] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117142, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.270736] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117140, 'name': PowerOffVM_Task, 'duration_secs': 0.255717} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.270978] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1067.271184] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance 'bc75898d-7856-4ecb-9640-ec30538fe90f' progress to 17 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1067.286300] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6f13bc57-0b9e-4d33-ba50-cfd1ea13f221 tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Lock "1f346681-b63f-4587-808c-bde4f0ba5831" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.128s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.286776] env[62109]: DEBUG oslo_concurrency.lockutils [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Lock "1f346681-b63f-4587-808c-bde4f0ba5831" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.186s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.287018] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Acquiring lock "1f346681-b63f-4587-808c-bde4f0ba5831-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.287261] env[62109]: DEBUG oslo_concurrency.lockutils [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Lock "1f346681-b63f-4587-808c-bde4f0ba5831-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.287390] env[62109]: DEBUG oslo_concurrency.lockutils [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Lock "1f346681-b63f-4587-808c-bde4f0ba5831-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.291452] env[62109]: INFO nova.compute.manager [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Terminating instance [ 1067.293462] env[62109]: DEBUG nova.compute.manager [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1067.293658] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1067.294463] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c461fe1-bf25-4e40-803e-614db57af66c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.304840] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1067.307900] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f61b7caf-a99b-4051-903f-47e8661346cd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.314764] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117141, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.318447] env[62109]: DEBUG oslo_vmware.api [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Waiting for the task: (returnval){ [ 1067.318447] env[62109]: value = "task-1117143" [ 1067.318447] env[62109]: _type = "Task" [ 1067.318447] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.328097] env[62109]: DEBUG oslo_vmware.api [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117143, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.393889] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71342898-7822-4343-9ddf-20ed89f6a3f8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.406158] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a5d814-1a23-4986-ac31-6be0d6b49f58 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.437837] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f9dc77-5f18-416f-a188-ec9920806bbf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.446586] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817c4242-4997-4b72-b860-1d9b6ed757fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.464784] env[62109]: DEBUG nova.compute.provider_tree [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.653786] env[62109]: DEBUG nova.compute.manager [req-f4358ea2-7094-4d82-993e-44f5929a37ee req-acc90292-13eb-405d-8bd0-2fe4501294a6 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Received event network-vif-plugged-a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1067.654217] env[62109]: DEBUG oslo_concurrency.lockutils [req-f4358ea2-7094-4d82-993e-44f5929a37ee req-acc90292-13eb-405d-8bd0-2fe4501294a6 service nova] Acquiring lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.654400] env[62109]: DEBUG oslo_concurrency.lockutils [req-f4358ea2-7094-4d82-993e-44f5929a37ee req-acc90292-13eb-405d-8bd0-2fe4501294a6 service nova] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.654586] env[62109]: DEBUG oslo_concurrency.lockutils [req-f4358ea2-7094-4d82-993e-44f5929a37ee req-acc90292-13eb-405d-8bd0-2fe4501294a6 service nova] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.654766] env[62109]: DEBUG nova.compute.manager [req-f4358ea2-7094-4d82-993e-44f5929a37ee req-acc90292-13eb-405d-8bd0-2fe4501294a6 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] No waiting events found dispatching network-vif-plugged-a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1067.654966] env[62109]: 
WARNING nova.compute.manager [req-f4358ea2-7094-4d82-993e-44f5929a37ee req-acc90292-13eb-405d-8bd0-2fe4501294a6 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Received unexpected event network-vif-plugged-a04abfce-a9e7-413a-94d6-d14ed8f205cb for instance with vm_state building and task_state spawning. [ 1067.759376] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117142, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066567} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.760179] env[62109]: DEBUG nova.network.neutron [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Successfully updated port: a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1067.762961] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1067.762961] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5a0279-9565-48b7-a5d1-0a9183ed7dae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.777535] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1067.777812] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1067.777994] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1067.778238] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1067.778406] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1067.778562] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1067.778768] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1067.778931] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1067.779116] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1067.779286] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1067.779474] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1067.793659] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 42e26a9c-fdba-4d5c-86b1-146fce2e6b23/42e26a9c-fdba-4d5c-86b1-146fce2e6b23.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1067.794874] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7de2f620-72c4-4e62-813f-c0019d870eaf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.805089] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ed6a462-1049-4f3e-8325-6293a5cd199f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1067.832046] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1067.832046] env[62109]: value = "task-1117145" [ 1067.832046] env[62109]: _type = "Task" [ 1067.832046] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.832379] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1067.832379] env[62109]: value = "task-1117144" [ 1067.832379] env[62109]: _type = "Task" [ 1067.832379] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.838296] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117141, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.909356} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.838550] env[62109]: DEBUG oslo_vmware.api [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117143, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.844845] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] ad3d92ec-29a9-4f01-8117-47352c244e1e/ad3d92ec-29a9-4f01-8117-47352c244e1e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1067.845104] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1067.845416] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f90d2efd-bed5-4916-b2d7-8399464c0e4d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.855302] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117145, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.861482] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117144, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.861991] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1067.861991] env[62109]: value = "task-1117146" [ 1067.861991] env[62109]: _type = "Task" [ 1067.861991] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.871694] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117146, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.968840] env[62109]: DEBUG nova.scheduler.client.report [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1068.263627] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1068.263627] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.263627] env[62109]: DEBUG nova.network.neutron [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1068.338400] env[62109]: DEBUG oslo_vmware.api [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117143, 'name': PowerOffVM_Task, 'duration_secs': 0.527327} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.339237] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1068.339237] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1068.346876] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4dc32fc-5b59-4ddc-9790-93994bdd2740 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.351800] env[62109]: DEBUG nova.compute.manager [req-6f82ac07-a0e8-43ad-a86a-f8f06872d94d req-8bfeaa8c-c5d3-4977-9557-d035ffffc3b7 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Received event network-changed-953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1068.352041] env[62109]: DEBUG nova.compute.manager [req-6f82ac07-a0e8-43ad-a86a-f8f06872d94d req-8bfeaa8c-c5d3-4977-9557-d035ffffc3b7 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Refreshing instance network info cache due to event network-changed-953462ff-1e7c-459f-aef0-b8cd1de48900. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1068.352482] env[62109]: DEBUG oslo_concurrency.lockutils [req-6f82ac07-a0e8-43ad-a86a-f8f06872d94d req-8bfeaa8c-c5d3-4977-9557-d035ffffc3b7 service nova] Acquiring lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1068.352722] env[62109]: DEBUG oslo_concurrency.lockutils [req-6f82ac07-a0e8-43ad-a86a-f8f06872d94d req-8bfeaa8c-c5d3-4977-9557-d035ffffc3b7 service nova] Acquired lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.352938] env[62109]: DEBUG nova.network.neutron [req-6f82ac07-a0e8-43ad-a86a-f8f06872d94d req-8bfeaa8c-c5d3-4977-9557-d035ffffc3b7 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Refreshing network info cache for port 953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1068.362098] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117144, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.367273] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117145, 'name': ReconfigVM_Task, 'duration_secs': 0.384807} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.370985] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance 'bc75898d-7856-4ecb-9640-ec30538fe90f' progress to 33 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1068.381876] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117146, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076557} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.382428] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1068.383158] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f3dd7b-3691-40bb-87a5-29c73e41dbb0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.408743] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] ad3d92ec-29a9-4f01-8117-47352c244e1e/ad3d92ec-29a9-4f01-8117-47352c244e1e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1068.409159] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aafdcb88-3cea-4c38-bdb0-3c77a26b02fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.430190] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1068.430469] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1068.430652] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Deleting the datastore file [datastore1] 1f346681-b63f-4587-808c-bde4f0ba5831 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1068.431246] env[62109]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-36502492-e7d5-417a-937e-02a6e23f2309 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.435126] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1068.435126] env[62109]: value = "task-1117148" [ 1068.435126] env[62109]: _type = "Task" [ 1068.435126] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.441463] env[62109]: DEBUG oslo_vmware.api [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Waiting for the task: (returnval){ [ 1068.441463] env[62109]: value = "task-1117149" [ 1068.441463] env[62109]: _type = "Task" [ 1068.441463] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.448015] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117148, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.453788] env[62109]: DEBUG oslo_vmware.api [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117149, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.474206] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.801s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.476401] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.270s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.477926] env[62109]: INFO nova.compute.claims [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1068.512915] env[62109]: INFO nova.network.neutron [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Updating port bea956d1-ceb2-4342-bef6-6a37fef7ec4f with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1068.801929] env[62109]: DEBUG nova.network.neutron [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1068.851716] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117144, 'name': ReconfigVM_Task, 'duration_secs': 0.814745} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.852023] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 42e26a9c-fdba-4d5c-86b1-146fce2e6b23/42e26a9c-fdba-4d5c-86b1-146fce2e6b23.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1068.852711] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-751a15d8-f5fd-4f41-97d1-6da768545c98 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.860921] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1068.860921] env[62109]: value = "task-1117150" [ 1068.860921] env[62109]: _type = "Task" [ 1068.860921] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.869568] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117150, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.877743] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1068.878107] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1068.878409] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1068.878706] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1068.878964] env[62109]: 
DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1068.879250] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1068.879607] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1068.879872] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1068.880174] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1068.880529] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1068.880845] env[62109]: DEBUG nova.virt.hardware [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1068.886644] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfiguring VM instance instance-0000005c to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1068.887393] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7cf10fc6-3df7-4bef-8a55-5db23decd87c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.908498] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1068.908498] env[62109]: value = "task-1117151" [ 1068.908498] env[62109]: _type = "Task" [ 1068.908498] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.921553] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117151, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.948994] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117148, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.950514] env[62109]: DEBUG nova.network.neutron [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updating instance_info_cache with network_info: [{"id": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "address": "fa:16:3e:53:c9:6c", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa04abfce-a9", "ovs_interfaceid": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.955133] env[62109]: DEBUG oslo_vmware.api [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Task: {'id': task-1117149, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.447816} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.955420] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.955643] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1068.957029] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1068.957029] env[62109]: INFO nova.compute.manager [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1068.957029] env[62109]: DEBUG oslo.service.loopingcall [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1068.957029] env[62109]: DEBUG nova.compute.manager [-] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1068.957029] env[62109]: DEBUG nova.network.neutron [-] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1069.230708] env[62109]: DEBUG nova.network.neutron [req-6f82ac07-a0e8-43ad-a86a-f8f06872d94d req-8bfeaa8c-c5d3-4977-9557-d035ffffc3b7 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Updated VIF entry in instance network info cache for port 953462ff-1e7c-459f-aef0-b8cd1de48900. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1069.231135] env[62109]: DEBUG nova.network.neutron [req-6f82ac07-a0e8-43ad-a86a-f8f06872d94d req-8bfeaa8c-c5d3-4977-9557-d035ffffc3b7 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Updating instance_info_cache with network_info: [{"id": "953462ff-1e7c-459f-aef0-b8cd1de48900", "address": "fa:16:3e:f5:c9:21", "network": {"id": "b4705a23-aa69-4f1d-ab3d-220ae9042d81", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1020451027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "602e84ee01de44dabeb7c20fdbcb5795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap953462ff-1e", "ovs_interfaceid": "953462ff-1e7c-459f-aef0-b8cd1de48900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.373124] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117150, 'name': Rename_Task, 'duration_secs': 0.230884} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.373467] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1069.373723] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9062910-c16a-45a7-bf03-4af937d6d0e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.383793] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1069.383793] env[62109]: value = "task-1117152" [ 1069.383793] env[62109]: _type = "Task" [ 1069.383793] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.401514] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117152, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.420664] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117151, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.446903] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117148, 'name': ReconfigVM_Task, 'duration_secs': 0.547096} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.447316] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Reconfigured VM instance instance-00000065 to attach disk [datastore1] ad3d92ec-29a9-4f01-8117-47352c244e1e/ad3d92ec-29a9-4f01-8117-47352c244e1e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1069.448401] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-834f4480-d7fc-4815-884f-ad4d9dc97ae9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.455992] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.456373] env[62109]: DEBUG nova.compute.manager [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Instance network_info: |[{"id": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "address": "fa:16:3e:53:c9:6c", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa04abfce-a9", "ovs_interfaceid": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1069.456824] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1069.456824] env[62109]: value = "task-1117153" [ 1069.456824] env[62109]: _type = "Task" [ 1069.456824] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.457226] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:c9:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a04abfce-a9e7-413a-94d6-d14ed8f205cb', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1069.468415] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating folder: Project (430353b9a427408494b462b49f11354a). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1069.468753] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9be3d53e-b149-4f62-9778-ffb53c9bd3c8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.483824] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117153, 'name': Rename_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.488762] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Created folder: Project (430353b9a427408494b462b49f11354a) in parent group-v244329. [ 1069.488935] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating folder: Instances. Parent ref: group-v244542. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1069.490249] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83addeac-1cd8-47c5-8681-5bf1a913fdad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.500107] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Created folder: Instances in parent group-v244542. 
[ 1069.500476] env[62109]: DEBUG oslo.service.loopingcall [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1069.500729] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1069.501435] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39e25a38-ef79-414f-b0bd-049be42d99dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.535577] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1069.535577] env[62109]: value = "task-1117156" [ 1069.535577] env[62109]: _type = "Task" [ 1069.535577] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.546239] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117156, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.682133] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de39cbb4-fd16-46a7-b003-366175c947c5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.688603] env[62109]: DEBUG nova.compute.manager [req-9ac30f44-d201-4142-aba5-b111bba2c5c7 req-ce3d8ef1-c92a-4608-8502-9940490139d9 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Received event network-changed-a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1069.688805] env[62109]: DEBUG nova.compute.manager [req-9ac30f44-d201-4142-aba5-b111bba2c5c7 req-ce3d8ef1-c92a-4608-8502-9940490139d9 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Refreshing instance network info cache due to event network-changed-a04abfce-a9e7-413a-94d6-d14ed8f205cb. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1069.689087] env[62109]: DEBUG oslo_concurrency.lockutils [req-9ac30f44-d201-4142-aba5-b111bba2c5c7 req-ce3d8ef1-c92a-4608-8502-9940490139d9 service nova] Acquiring lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.689295] env[62109]: DEBUG oslo_concurrency.lockutils [req-9ac30f44-d201-4142-aba5-b111bba2c5c7 req-ce3d8ef1-c92a-4608-8502-9940490139d9 service nova] Acquired lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.689519] env[62109]: DEBUG nova.network.neutron [req-9ac30f44-d201-4142-aba5-b111bba2c5c7 req-ce3d8ef1-c92a-4608-8502-9940490139d9 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Refreshing network info cache for port a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1069.693740] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e5cdab-13ef-41ca-86c6-861481f5c084 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.697079] env[62109]: DEBUG nova.network.neutron [-] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.734235] env[62109]: DEBUG oslo_concurrency.lockutils [req-6f82ac07-a0e8-43ad-a86a-f8f06872d94d req-8bfeaa8c-c5d3-4977-9557-d035ffffc3b7 service nova] Releasing lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.735121] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2f22bb-238e-4a56-8cc6-d9be8ae83d10 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.743778] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f912f10-eaa3-4eb0-87a9-30b777dabc0b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.759446] env[62109]: DEBUG nova.compute.provider_tree [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.896135] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117152, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.921912] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117151, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.982969] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117153, 'name': Rename_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.019425] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1070.019630] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.019807] env[62109]: DEBUG nova.network.neutron [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1070.046220] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117156, 'name': CreateVM_Task} progress is 25%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.200836] env[62109]: INFO nova.compute.manager [-] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Took 1.24 seconds to deallocate network for instance. [ 1070.264058] env[62109]: DEBUG nova.scheduler.client.report [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1070.398366] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117152, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.423242] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117151, 'name': ReconfigVM_Task, 'duration_secs': 1.339745} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.423523] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfigured VM instance instance-0000005c to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1070.424327] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249f86e8-4c19-4394-94ba-d7678a25ec30 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.427599] env[62109]: DEBUG nova.network.neutron [req-9ac30f44-d201-4142-aba5-b111bba2c5c7 req-ce3d8ef1-c92a-4608-8502-9940490139d9 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updated VIF entry in instance network info cache for port a04abfce-a9e7-413a-94d6-d14ed8f205cb. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1070.427928] env[62109]: DEBUG nova.network.neutron [req-9ac30f44-d201-4142-aba5-b111bba2c5c7 req-ce3d8ef1-c92a-4608-8502-9940490139d9 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updating instance_info_cache with network_info: [{"id": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "address": "fa:16:3e:53:c9:6c", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa04abfce-a9", "ovs_interfaceid": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.452971] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] bc75898d-7856-4ecb-9640-ec30538fe90f/bc75898d-7856-4ecb-9640-ec30538fe90f.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1070.453973] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f68b4ac2-2ac8-42c0-a629-386e124e2f42 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.475780] env[62109]: DEBUG oslo_vmware.api [None 
req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1070.475780] env[62109]: value = "task-1117157" [ 1070.475780] env[62109]: _type = "Task" [ 1070.475780] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.484637] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117153, 'name': Rename_Task, 'duration_secs': 0.879952} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.488601] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1070.488901] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117157, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.489142] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cbfcfdde-7ea5-4af3-ab90-b87ec33bc2d1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.497133] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1070.497133] env[62109]: value = "task-1117158" [ 1070.497133] env[62109]: _type = "Task" [ 1070.497133] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.503773] env[62109]: DEBUG nova.compute.manager [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Received event network-changed-953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1070.504009] env[62109]: DEBUG nova.compute.manager [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Refreshing instance network info cache due to event network-changed-953462ff-1e7c-459f-aef0-b8cd1de48900. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1070.504257] env[62109]: DEBUG oslo_concurrency.lockutils [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] Acquiring lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1070.504406] env[62109]: DEBUG oslo_concurrency.lockutils [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] Acquired lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.504574] env[62109]: DEBUG nova.network.neutron [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Refreshing network info cache for port 953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1070.511427] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117158, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.557864] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117156, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.709868] env[62109]: DEBUG oslo_concurrency.lockutils [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.752955] env[62109]: DEBUG nova.network.neutron [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Updating instance_info_cache with network_info: [{"id": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "address": "fa:16:3e:68:0e:6e", "network": {"id": "feb45222-861d-4499-8c29-03176662f1ef", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-854878036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "093c284d31de414cb583d501864456c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbea956d1-ce", "ovs_interfaceid": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.769289] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.293s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.769727] env[62109]: DEBUG nova.compute.manager [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1070.773078] env[62109]: DEBUG oslo_concurrency.lockutils [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.063s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.773339] env[62109]: DEBUG nova.objects.instance [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Lazy-loading 'resources' on Instance uuid 1f346681-b63f-4587-808c-bde4f0ba5831 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.896913] env[62109]: DEBUG oslo_vmware.api [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117152, 'name': PowerOnVM_Task, 'duration_secs': 1.271716} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.897205] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1070.897443] env[62109]: DEBUG nova.compute.manager [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1070.898264] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368de1e3-1069-4562-ab85-acb649f1aed6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.931555] env[62109]: DEBUG oslo_concurrency.lockutils [req-9ac30f44-d201-4142-aba5-b111bba2c5c7 req-ce3d8ef1-c92a-4608-8502-9940490139d9 service nova] Releasing lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.932033] env[62109]: DEBUG nova.compute.manager [req-9ac30f44-d201-4142-aba5-b111bba2c5c7 req-ce3d8ef1-c92a-4608-8502-9940490139d9 service nova] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Received event network-vif-deleted-33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1070.932481] env[62109]: INFO nova.compute.manager [req-9ac30f44-d201-4142-aba5-b111bba2c5c7 req-ce3d8ef1-c92a-4608-8502-9940490139d9 service nova] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Neutron deleted interface 33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8; detaching it from the instance and deleting it from the info cache [ 1070.932806] env[62109]: DEBUG nova.network.neutron [req-9ac30f44-d201-4142-aba5-b111bba2c5c7 req-ce3d8ef1-c92a-4608-8502-9940490139d9 service nova] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.987827] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117157, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.010366] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117158, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.048179] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117156, 'name': CreateVM_Task, 'duration_secs': 1.054124} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.048441] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1071.049178] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.049681] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.049924] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1071.050236] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05434a1b-194b-4c5b-b8a3-6a28fb7491e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.055692] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1071.055692] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fb8f67-7da8-a868-9226-8e8a491dbf12" [ 1071.055692] env[62109]: _type = "Task" [ 1071.055692] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.065122] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fb8f67-7da8-a868-9226-8e8a491dbf12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.210702] env[62109]: DEBUG nova.network.neutron [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Updated VIF entry in instance network info cache for port 953462ff-1e7c-459f-aef0-b8cd1de48900. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1071.211457] env[62109]: DEBUG nova.network.neutron [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Updating instance_info_cache with network_info: [{"id": "953462ff-1e7c-459f-aef0-b8cd1de48900", "address": "fa:16:3e:f5:c9:21", "network": {"id": "b4705a23-aa69-4f1d-ab3d-220ae9042d81", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1020451027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "602e84ee01de44dabeb7c20fdbcb5795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap953462ff-1e", "ovs_interfaceid": "953462ff-1e7c-459f-aef0-b8cd1de48900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.255509] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Releasing lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.274713] env[62109]: DEBUG nova.compute.utils [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1071.278547] env[62109]: DEBUG nova.compute.manager [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1071.278939] env[62109]: DEBUG nova.network.neutron [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1071.329474] env[62109]: DEBUG nova.policy [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '67a1245dbb50458ebda1a0a350def68b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ac84df552ee74053a00b8204aa781f3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1071.419856] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.425045] env[62109]: DEBUG nova.virt.hardware [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='a48134ffdc76f7cb6aed4d94660850bb',container_format='bare',created_at=2024-10-03T08:00:20Z,direct_url=,disk_format='vmdk',id=e6951ef3-e8f5-49f6-b66e-499fcd3e2d42,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-101297104-shelved',owner='093c284d31de414cb583d501864456c8',properties=ImageMetaProps,protected=,size=31667200,status='active',tags=,updated_at=2024-10-03T08:00:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1071.425312] env[62109]: DEBUG nova.virt.hardware [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1071.425490] env[62109]: DEBUG nova.virt.hardware [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1071.425662] env[62109]: DEBUG nova.virt.hardware [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Flavor pref 0:0:0 {{(pid=62109) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1071.425813] env[62109]: DEBUG nova.virt.hardware [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1071.425966] env[62109]: DEBUG nova.virt.hardware [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1071.427508] env[62109]: DEBUG nova.virt.hardware [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1071.427662] env[62109]: DEBUG nova.virt.hardware [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1071.427858] env[62109]: DEBUG nova.virt.hardware [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1071.428035] env[62109]: DEBUG nova.virt.hardware [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1071.428336] env[62109]: DEBUG nova.virt.hardware [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1071.429835] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2a5113-53b5-497a-8bfe-af5077225f0b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.438273] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a9275154-98b7-4b9a-afc0-b1210f4439bf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.445831] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe92e589-0e2e-4337-90ff-f2c21a4ca77b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.458461] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a593175-cb3f-4849-b6dc-b23b828eb2e4 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.484171] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:0e:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4349e30-c086-4c24-9e0e-83996d808a1b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bea956d1-ceb2-4342-bef6-6a37fef7ec4f', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1071.492029] env[62109]: DEBUG oslo.service.loopingcall [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1071.496378] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1071.496672] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8be7455d-08ca-44c3-ba28-f68d1f76a421 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.525626] env[62109]: DEBUG nova.compute.manager [req-9ac30f44-d201-4142-aba5-b111bba2c5c7 req-ce3d8ef1-c92a-4608-8502-9940490139d9 service nova] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Detach interface failed, port_id=33b7f1b2-08bf-4c57-84cb-6a71dfc7b5e8, reason: Instance 1f346681-b63f-4587-808c-bde4f0ba5831 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1071.538219] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117157, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.540134] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1071.540134] env[62109]: value = "task-1117159" [ 1071.540134] env[62109]: _type = "Task" [ 1071.540134] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.547509] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117158, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.556578] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117159, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.567183] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fb8f67-7da8-a868-9226-8e8a491dbf12, 'name': SearchDatastore_Task, 'duration_secs': 0.059787} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.571452] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.571738] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1071.572075] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.572141] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.572324] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1071.572860] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85cb9d17-cbca-46f8-a35c-039deab56e43 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.583628] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1071.583852] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1071.584661] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19def56c-769e-48e6-88c1-7b2dd98f901b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.592690] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1071.592690] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d54df5-f700-73fe-b082-89567178f0ba" [ 1071.592690] env[62109]: _type = "Task" [ 1071.592690] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.601717] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d54df5-f700-73fe-b082-89567178f0ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.603471] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964e1b95-007d-4794-a5fe-3d53ed52a969 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.611013] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f360175b-ec68-4c10-8e71-a7fb7ac9ac00 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.644140] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f33af7-eeab-49db-bf00-a53bfe213794 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.654291] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39366b6-bb2a-49fd-a4ee-939bfc233a72 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.674119] env[62109]: DEBUG nova.compute.provider_tree [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.715028] env[62109]: DEBUG oslo_concurrency.lockutils [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] Releasing lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.715028] env[62109]: DEBUG nova.compute.manager [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Received event network-changed-953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1071.715028] 
env[62109]: DEBUG nova.compute.manager [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Refreshing instance network info cache due to event network-changed-953462ff-1e7c-459f-aef0-b8cd1de48900. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1071.715028] env[62109]: DEBUG oslo_concurrency.lockutils [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] Acquiring lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.715028] env[62109]: DEBUG oslo_concurrency.lockutils [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] Acquired lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.715028] env[62109]: DEBUG nova.network.neutron [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Refreshing network info cache for port 953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1071.742055] env[62109]: DEBUG nova.network.neutron [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Successfully created port: fad3649e-36ac-4e54-8d8b-9a77b8da03ee {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1071.787132] env[62109]: DEBUG nova.compute.manager [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1071.882955] env[62109]: DEBUG nova.compute.manager [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Received event network-vif-plugged-bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1071.882955] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] Acquiring lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.883134] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.883325] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.883569] env[62109]: DEBUG nova.compute.manager [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] No waiting events found dispatching network-vif-plugged-bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1071.883757] env[62109]: WARNING nova.compute.manager [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Received unexpected event network-vif-plugged-bea956d1-ceb2-4342-bef6-6a37fef7ec4f for instance with vm_state shelved_offloaded and task_state spawning. [ 1071.883922] env[62109]: DEBUG nova.compute.manager [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Received event network-changed-bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1071.884095] env[62109]: DEBUG nova.compute.manager [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Refreshing instance network info cache due to event network-changed-bea956d1-ceb2-4342-bef6-6a37fef7ec4f. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1071.884284] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] Acquiring lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.884444] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] Acquired lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.885335] env[62109]: DEBUG nova.network.neutron [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Refreshing network info cache for port bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1071.994225] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117157, 'name': ReconfigVM_Task, 'duration_secs': 1.247556} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.994530] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfigured VM instance instance-0000005c to attach disk [datastore1] bc75898d-7856-4ecb-9640-ec30538fe90f/bc75898d-7856-4ecb-9640-ec30538fe90f.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1071.994816] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance 'bc75898d-7856-4ecb-9640-ec30538fe90f' progress to 50 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1072.037929] env[62109]: DEBUG oslo_vmware.api [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117158, 'name': PowerOnVM_Task, 'duration_secs': 1.454344} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.038307] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1072.038838] env[62109]: INFO nova.compute.manager [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Took 10.02 seconds to spawn the instance on the hypervisor. 
[ 1072.039062] env[62109]: DEBUG nova.compute.manager [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1072.040721] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b502c5ba-dbd7-4d93-9239-651dbfd9b584 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.060028] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117159, 'name': CreateVM_Task, 'duration_secs': 0.413017} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.060028] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1072.060028] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.060028] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.060028] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1072.060028] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-414b25dd-9c1d-4eb5-9e0c-a7f432578118 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.063680] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1072.063680] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e5db8d-b9b5-f9ab-9209-0bc4d6fd096a" [ 1072.063680] env[62109]: _type = "Task" [ 1072.063680] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.075163] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e5db8d-b9b5-f9ab-9209-0bc4d6fd096a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.104716] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d54df5-f700-73fe-b082-89567178f0ba, 'name': SearchDatastore_Task, 'duration_secs': 0.015523} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.105708] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9d9f49e-9e04-4f6d-81f2-a0c9876e8826 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.111495] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1072.111495] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52deb3cd-c9c2-b47b-ebcc-165fcff2f0c1" [ 1072.111495] env[62109]: _type = "Task" [ 1072.111495] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.120900] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52deb3cd-c9c2-b47b-ebcc-165fcff2f0c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.179023] env[62109]: DEBUG nova.scheduler.client.report [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1072.295234] env[62109]: INFO nova.virt.block_device [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Booting with volume 35d2eab5-8592-4a6e-b3bb-80c16e77808d at /dev/sda [ 1072.339463] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af6927f1-119d-478c-be8c-0ef83b059588 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.350864] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3577b0ab-35a0-43c9-b5cb-b95d004fcc7e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.365772] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquiring lock "42e26a9c-fdba-4d5c-86b1-146fce2e6b23" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.365772] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Lock "42e26a9c-fdba-4d5c-86b1-146fce2e6b23" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.365772] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquiring lock "42e26a9c-fdba-4d5c-86b1-146fce2e6b23-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.365772] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Lock "42e26a9c-fdba-4d5c-86b1-146fce2e6b23-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.365772] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Lock "42e26a9c-fdba-4d5c-86b1-146fce2e6b23-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.366447] env[62109]: INFO nova.compute.manager [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Terminating instance [ 1072.368182] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquiring lock "refresh_cache-42e26a9c-fdba-4d5c-86b1-146fce2e6b23" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.368342] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquired lock "refresh_cache-42e26a9c-fdba-4d5c-86b1-146fce2e6b23" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.368508] env[62109]: DEBUG nova.network.neutron [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Building network info cache for instance {{(pid=62109) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 1072.396715] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25fc8612-9ddb-4878-a523-949a223e811a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.409024] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f247e8-185f-4e15-bbad-69a5d780c5f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.440277] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7368773-642d-41ee-8a5e-1c06b046d6a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.447141] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7659169c-1235-44fb-a44f-77b1d6fddb00 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.462503] env[62109]: DEBUG nova.virt.block_device [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updating existing volume attachment record: 06d4260e-5285-4e0b-a2bc-b1222f9126c6 {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1072.510133] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c975862-f849-42cf-9e78-40360c4d5d6e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.538534] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6baadff1-737b-426c-bc87-ebae9b95ad9d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.566378] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance 'bc75898d-7856-4ecb-9640-ec30538fe90f' progress to 67 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1072.582032] env[62109]: INFO nova.compute.manager [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Took 16.23 seconds to build instance. 
[ 1072.587699] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.588028] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Processing image e6951ef3-e8f5-49f6-b66e-499fcd3e2d42 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1072.588334] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.588490] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.588676] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1072.588944] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29a9573b-f79b-4cef-ba20-ad40f7d7bc9d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.599567] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1072.599857] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1072.601094] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be8966ae-8f9f-438e-8815-f54270b83c75 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.607491] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1072.607491] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52400eb9-1d9a-7a99-0b1f-f0a4620819ca" [ 1072.607491] env[62109]: _type = "Task" [ 1072.607491] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.622333] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52400eb9-1d9a-7a99-0b1f-f0a4620819ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.626122] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52deb3cd-c9c2-b47b-ebcc-165fcff2f0c1, 'name': SearchDatastore_Task, 'duration_secs': 0.012265} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.626986] env[62109]: DEBUG nova.network.neutron [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Updated VIF entry in instance network info cache for port 953462ff-1e7c-459f-aef0-b8cd1de48900. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1072.627350] env[62109]: DEBUG nova.network.neutron [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Updating instance_info_cache with network_info: [{"id": "953462ff-1e7c-459f-aef0-b8cd1de48900", "address": "fa:16:3e:f5:c9:21", "network": {"id": "b4705a23-aa69-4f1d-ab3d-220ae9042d81", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1020451027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "602e84ee01de44dabeb7c20fdbcb5795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap953462ff-1e", "ovs_interfaceid": "953462ff-1e7c-459f-aef0-b8cd1de48900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.628573] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.628832] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 1b3d7fa7-5428-460e-ab47-49c6d38f24a5/1b3d7fa7-5428-460e-ab47-49c6d38f24a5.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1072.631419] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6c209c5-80ac-4e04-9e99-a04cecac2629 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.641421] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1072.641421] env[62109]: value = "task-1117160" [ 1072.641421] env[62109]: _type = "Task" [ 1072.641421] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.651360] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117160, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.671034] env[62109]: DEBUG nova.network.neutron [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Updated VIF entry in instance network info cache for port bea956d1-ceb2-4342-bef6-6a37fef7ec4f. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1072.671521] env[62109]: DEBUG nova.network.neutron [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Updating instance_info_cache with network_info: [{"id": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "address": "fa:16:3e:68:0e:6e", "network": {"id": "feb45222-861d-4499-8c29-03176662f1ef", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-854878036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "093c284d31de414cb583d501864456c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbea956d1-ce", "ovs_interfaceid": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.681819] env[62109]: DEBUG oslo_concurrency.lockutils [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.908s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.684870] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.265s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.685085] env[62109]: DEBUG nova.objects.instance [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Trying to apply a migration context that does not seem to 
be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1072.710257] env[62109]: INFO nova.scheduler.client.report [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Deleted allocations for instance 1f346681-b63f-4587-808c-bde4f0ba5831 [ 1072.764931] env[62109]: DEBUG nova.compute.manager [req-8cd6edf0-00d7-4dc9-b65a-5f322d52aab3 req-5a73fd8c-8a1f-4655-8825-54956be785ff service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Received event network-changed-953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1072.765188] env[62109]: DEBUG nova.compute.manager [req-8cd6edf0-00d7-4dc9-b65a-5f322d52aab3 req-5a73fd8c-8a1f-4655-8825-54956be785ff service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Refreshing instance network info cache due to event network-changed-953462ff-1e7c-459f-aef0-b8cd1de48900. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1072.765338] env[62109]: DEBUG oslo_concurrency.lockutils [req-8cd6edf0-00d7-4dc9-b65a-5f322d52aab3 req-5a73fd8c-8a1f-4655-8825-54956be785ff service nova] Acquiring lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.784190] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquiring lock "d9a02690-0e85-4a61-a794-e9a852ce77d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.784449] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Lock "d9a02690-0e85-4a61-a794-e9a852ce77d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.784660] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquiring lock "d9a02690-0e85-4a61-a794-e9a852ce77d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.784848] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Lock "d9a02690-0e85-4a61-a794-e9a852ce77d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.785033] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 
tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Lock "d9a02690-0e85-4a61-a794-e9a852ce77d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.787167] env[62109]: INFO nova.compute.manager [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Terminating instance [ 1072.789056] env[62109]: DEBUG nova.compute.manager [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1072.789263] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1072.790156] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4b06b5-5052-4e6b-824d-9c52eed4d718 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.802747] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1072.803462] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e1a6753-a5e9-4cff-96f4-f876fb3e2ff3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.813300] env[62109]: DEBUG oslo_vmware.api [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1072.813300] env[62109]: value = "task-1117161" [ 1072.813300] env[62109]: _type = "Task" [ 1072.813300] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.822769] env[62109]: DEBUG oslo_vmware.api [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117161, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.890157] env[62109]: DEBUG nova.network.neutron [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1072.962157] env[62109]: DEBUG nova.network.neutron [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.082092] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bea7f26-5b89-456d-8c3a-925d4db1283f tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "ad3d92ec-29a9-4f01-8117-47352c244e1e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.739s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.122553] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Preparing fetch location {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1073.123091] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Fetch image to [datastore1] OSTACK_IMG_a9e49ed8-7b64-498d-96cd-fee73609c152/OSTACK_IMG_a9e49ed8-7b64-498d-96cd-fee73609c152.vmdk {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1073.123091] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Downloading stream optimized image e6951ef3-e8f5-49f6-b66e-499fcd3e2d42 to [datastore1] OSTACK_IMG_a9e49ed8-7b64-498d-96cd-fee73609c152/OSTACK_IMG_a9e49ed8-7b64-498d-96cd-fee73609c152.vmdk on the data store datastore1 as vApp {{(pid=62109) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1073.123375] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Downloading image file data e6951ef3-e8f5-49f6-b66e-499fcd3e2d42 to the ESX as VM named 'OSTACK_IMG_a9e49ed8-7b64-498d-96cd-fee73609c152' {{(pid=62109) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1073.131741] env[62109]: DEBUG oslo_concurrency.lockutils [req-28d577be-28fd-477c-a01d-385726d2682e req-15146ae2-8faa-4997-806e-81ca1699ca43 service nova] Releasing lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.132263] env[62109]: DEBUG oslo_concurrency.lockutils [req-8cd6edf0-00d7-4dc9-b65a-5f322d52aab3 req-5a73fd8c-8a1f-4655-8825-54956be785ff service nova] Acquired lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.132468] env[62109]: DEBUG 
nova.network.neutron [req-8cd6edf0-00d7-4dc9-b65a-5f322d52aab3 req-5a73fd8c-8a1f-4655-8825-54956be785ff service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Refreshing network info cache for port 953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1073.159401] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117160, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.181241] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec287e2c-8ee3-4433-9647-88a8bb2106ab req-1737646a-1f12-48a0-baa8-a8a536bf57df service nova] Releasing lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.220439] env[62109]: DEBUG oslo_concurrency.lockutils [None req-837a1298-5f46-4912-977e-b7a9ae15db4c tempest-InstanceActionsNegativeTestJSON-1881575774 tempest-InstanceActionsNegativeTestJSON-1881575774-project-member] Lock "1f346681-b63f-4587-808c-bde4f0ba5831" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.933s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.225044] env[62109]: DEBUG oslo_vmware.rw_handles [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1073.225044] env[62109]: value = "resgroup-9" [ 1073.225044] env[62109]: _type = "ResourcePool" [ 1073.225044] env[62109]: }. {{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1073.225044] env[62109]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-2b69bd06-15f1-435d-a810-15f8eb4c53a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.255119] env[62109]: DEBUG oslo_vmware.rw_handles [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lease: (returnval){ [ 1073.255119] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f73e69-8fb6-6fc3-6427-ae17b99bc376" [ 1073.255119] env[62109]: _type = "HttpNfcLease" [ 1073.255119] env[62109]: } obtained for vApp import into resource pool (val){ [ 1073.255119] env[62109]: value = "resgroup-9" [ 1073.255119] env[62109]: _type = "ResourcePool" [ 1073.255119] env[62109]: }. {{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1073.255407] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the lease: (returnval){ [ 1073.255407] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f73e69-8fb6-6fc3-6427-ae17b99bc376" [ 1073.255407] env[62109]: _type = "HttpNfcLease" [ 1073.255407] env[62109]: } to be ready. 
{{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1073.263086] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1073.263086] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f73e69-8fb6-6fc3-6427-ae17b99bc376" [ 1073.263086] env[62109]: _type = "HttpNfcLease" [ 1073.263086] env[62109]: } is initializing. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1073.323891] env[62109]: DEBUG oslo_vmware.api [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117161, 'name': PowerOffVM_Task, 'duration_secs': 0.380504} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.324205] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1073.324374] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1073.324631] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25a50bd8-14f0-433b-b9bc-2ec9f6bc26a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.406442] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1073.406753] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1073.406975] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Deleting the datastore file [datastore2] d9a02690-0e85-4a61-a794-e9a852ce77d7 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1073.407391] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d4c6b49-8e46-4eba-9071-96e4c6cd38d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.414872] env[62109]: DEBUG oslo_vmware.api [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 
tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for the task: (returnval){ [ 1073.414872] env[62109]: value = "task-1117164" [ 1073.414872] env[62109]: _type = "Task" [ 1073.414872] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.424344] env[62109]: DEBUG oslo_vmware.api [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117164, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.453213] env[62109]: INFO nova.compute.manager [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Rebuilding instance [ 1073.466804] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Releasing lock "refresh_cache-42e26a9c-fdba-4d5c-86b1-146fce2e6b23" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.466804] env[62109]: DEBUG nova.compute.manager [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1073.466804] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1073.468023] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f531920-9054-466c-ae1c-5351ff3126a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.478127] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1073.481879] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d8c3fcc-5030-400d-882c-e6c071f4c6a7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.489733] env[62109]: DEBUG oslo_vmware.api [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1073.489733] env[62109]: value = "task-1117165" [ 1073.489733] env[62109]: _type = "Task" [ 1073.489733] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.501262] env[62109]: DEBUG oslo_vmware.api [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117165, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.504335] env[62109]: DEBUG nova.compute.manager [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1073.505163] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97de0de9-a599-493f-b552-8086df33ad83 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.524736] env[62109]: DEBUG nova.network.neutron [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Successfully updated port: fad3649e-36ac-4e54-8d8b-9a77b8da03ee {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1073.652230] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117160, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577257} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.652430] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 1b3d7fa7-5428-460e-ab47-49c6d38f24a5/1b3d7fa7-5428-460e-ab47-49c6d38f24a5.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1073.652629] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1073.652887] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75f3e419-3d06-4756-9b45-234fa465eb06 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.673915] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1073.673915] env[62109]: value = "task-1117166" [ 1073.673915] env[62109]: _type = "Task" [ 1073.673915] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.682132] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117166, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.701395] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4946ef7-4fef-4a69-9bc4-9b137fe040b3 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.766114] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1073.766114] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f73e69-8fb6-6fc3-6427-ae17b99bc376" [ 1073.766114] env[62109]: _type = "HttpNfcLease" [ 1073.766114] env[62109]: } is initializing. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1073.850659] env[62109]: DEBUG nova.network.neutron [req-8cd6edf0-00d7-4dc9-b65a-5f322d52aab3 req-5a73fd8c-8a1f-4655-8825-54956be785ff service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Updated VIF entry in instance network info cache for port 953462ff-1e7c-459f-aef0-b8cd1de48900. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1073.851059] env[62109]: DEBUG nova.network.neutron [req-8cd6edf0-00d7-4dc9-b65a-5f322d52aab3 req-5a73fd8c-8a1f-4655-8825-54956be785ff service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Updating instance_info_cache with network_info: [{"id": "953462ff-1e7c-459f-aef0-b8cd1de48900", "address": "fa:16:3e:f5:c9:21", "network": {"id": "b4705a23-aa69-4f1d-ab3d-220ae9042d81", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1020451027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "602e84ee01de44dabeb7c20fdbcb5795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap953462ff-1e", "ovs_interfaceid": "953462ff-1e7c-459f-aef0-b8cd1de48900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.908823] env[62109]: DEBUG nova.compute.manager [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Received event network-vif-plugged-fad3649e-36ac-4e54-8d8b-9a77b8da03ee {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1073.909074] 
env[62109]: DEBUG oslo_concurrency.lockutils [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] Acquiring lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.909292] env[62109]: DEBUG oslo_concurrency.lockutils [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] Lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.909464] env[62109]: DEBUG oslo_concurrency.lockutils [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] Lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.909638] env[62109]: DEBUG nova.compute.manager [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] No waiting events found dispatching network-vif-plugged-fad3649e-36ac-4e54-8d8b-9a77b8da03ee {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1073.909806] env[62109]: WARNING nova.compute.manager [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Received unexpected event network-vif-plugged-fad3649e-36ac-4e54-8d8b-9a77b8da03ee for instance with vm_state building and task_state block_device_mapping. [ 1073.909970] env[62109]: DEBUG nova.compute.manager [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Received event network-changed-fad3649e-36ac-4e54-8d8b-9a77b8da03ee {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1073.911371] env[62109]: DEBUG nova.compute.manager [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Refreshing instance network info cache due to event network-changed-fad3649e-36ac-4e54-8d8b-9a77b8da03ee. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1073.911601] env[62109]: DEBUG oslo_concurrency.lockutils [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] Acquiring lock "refresh_cache-c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.911753] env[62109]: DEBUG oslo_concurrency.lockutils [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] Acquired lock "refresh_cache-c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.911922] env[62109]: DEBUG nova.network.neutron [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Refreshing network info cache for port fad3649e-36ac-4e54-8d8b-9a77b8da03ee {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1073.924793] env[62109]: DEBUG oslo_vmware.api [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Task: {'id': task-1117164, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203971} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.925322] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1073.925520] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1073.925728] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1073.925917] env[62109]: INFO nova.compute.manager [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1073.926173] env[62109]: DEBUG oslo.service.loopingcall [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1073.927232] env[62109]: DEBUG nova.compute.manager [-] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1073.927232] env[62109]: DEBUG nova.network.neutron [-] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1073.999200] env[62109]: DEBUG oslo_vmware.api [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117165, 'name': PowerOffVM_Task, 'duration_secs': 0.126995} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.999522] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1073.999730] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1074.000054] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43690068-e622-412b-8a9a-e0161e821da8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.018421] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1074.018729] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0919fb55-04b4-4a11-a04c-42912162281d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.025829] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1074.025829] env[62109]: value = "task-1117168" [ 1074.025829] env[62109]: _type = "Task" [ 1074.025829] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.030391] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "refresh_cache-c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1074.030697] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1074.030897] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1074.031237] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Deleting the datastore file [datastore1] 42e26a9c-fdba-4d5c-86b1-146fce2e6b23 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1074.031857] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c92e5d3a-96c2-418c-80bf-9705ef337547 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.037281] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117168, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.042679] env[62109]: DEBUG oslo_vmware.api [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for the task: (returnval){ [ 1074.042679] env[62109]: value = "task-1117169" [ 1074.042679] env[62109]: _type = "Task" [ 1074.042679] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.051684] env[62109]: DEBUG oslo_vmware.api [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117169, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.184875] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117166, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090071} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.185529] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1074.186201] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f713c53-cb3c-4306-b055-de34ce9c019f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.210138] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 1b3d7fa7-5428-460e-ab47-49c6d38f24a5/1b3d7fa7-5428-460e-ab47-49c6d38f24a5.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1074.210955] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b83b5cd-4ccb-4d44-9978-abe53ea1b22c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.235771] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1074.235771] env[62109]: value = "task-1117170" [ 1074.235771] env[62109]: _type = "Task" [ 1074.235771] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.245609] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117170, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.265837] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1074.265837] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f73e69-8fb6-6fc3-6427-ae17b99bc376" [ 1074.265837] env[62109]: _type = "HttpNfcLease" [ 1074.265837] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1074.266459] env[62109]: DEBUG oslo_vmware.rw_handles [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1074.266459] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f73e69-8fb6-6fc3-6427-ae17b99bc376" [ 1074.266459] env[62109]: _type = "HttpNfcLease" [ 1074.266459] env[62109]: }. 
{{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1074.266781] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed4d081-f524-487c-9b04-2f24b3c43ad6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.270070] env[62109]: DEBUG nova.network.neutron [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Port 8b33420f-fd6d-43fc-b4e1-141768c6024b binding to destination host cpu-1 is already ACTIVE {{(pid=62109) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1074.277018] env[62109]: DEBUG oslo_vmware.rw_handles [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ebc8c2-75f4-6534-9cc0-e2658d0539f7/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1074.277843] env[62109]: DEBUG oslo_vmware.rw_handles [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Creating HTTP connection to write to file with size = 31667200 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ebc8c2-75f4-6534-9cc0-e2658d0539f7/disk-0.vmdk. {{(pid=62109) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1074.347186] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-92c82ed0-9ab1-4077-bcf0-4327b62af56c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.353759] env[62109]: DEBUG oslo_concurrency.lockutils [req-8cd6edf0-00d7-4dc9-b65a-5f322d52aab3 req-5a73fd8c-8a1f-4655-8825-54956be785ff service nova] Releasing lock "refresh_cache-d9a02690-0e85-4a61-a794-e9a852ce77d7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1074.447742] env[62109]: DEBUG nova.network.neutron [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1074.527354] env[62109]: DEBUG nova.network.neutron [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.540703] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117168, 'name': PowerOffVM_Task, 'duration_secs': 0.379814} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.541723] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1074.541952] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1074.542831] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb67fe97-455b-435d-bbed-b84bd4dcc4d9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.557687] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1074.562081] env[62109]: DEBUG nova.compute.manager [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1074.562599] env[62109]: DEBUG nova.virt.hardware [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1074.562817] env[62109]: DEBUG nova.virt.hardware [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1074.562991] env[62109]: DEBUG nova.virt.hardware [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1074.563200] env[62109]: DEBUG nova.virt.hardware [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor pref 0:0:0 
{{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1074.563414] env[62109]: DEBUG nova.virt.hardware [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1074.563590] env[62109]: DEBUG nova.virt.hardware [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1074.563801] env[62109]: DEBUG nova.virt.hardware [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1074.563979] env[62109]: DEBUG nova.virt.hardware [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1074.564300] env[62109]: DEBUG nova.virt.hardware [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1074.564502] env[62109]: DEBUG nova.virt.hardware [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1074.564706] env[62109]: DEBUG nova.virt.hardware [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1074.564993] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1df75efc-d687-437a-8a4a-94a22b107b9b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.566670] env[62109]: DEBUG oslo_vmware.api [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Task: {'id': task-1117169, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100172} completed successfully. 
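The hardware.py entries above trace Nova's CPU-topology selection for the m1.nano flavor: with no flavor or image limits set, every (sockets, cores, threads) combination whose product equals the vCPU count is enumerated, which for 1 vCPU leaves only the single 1:1:1 topology that is logged. A simplified, stand-alone illustration of that enumeration (not Nova's actual implementation):

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every (sockets, cores, threads) triple whose product equals vcpus,
    subject to per-dimension limits (65536 is the 'unlimited' default logged above)."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)] -- the single topology logged above
print(list(possible_topologies(4)))   # multiple candidates, e.g. (1, 1, 4), (2, 2, 1), (4, 1, 1), ...
```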
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.570202] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d77e804-8abd-4374-8610-0dbb52e8059d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.573446] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1074.573678] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1074.573936] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1074.574145] env[62109]: INFO nova.compute.manager [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1074.574455] env[62109]: DEBUG oslo.service.loopingcall [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1074.577941] env[62109]: DEBUG nova.compute.manager [-] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1074.578056] env[62109]: DEBUG nova.network.neutron [-] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1074.589390] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b235f4-f51f-497b-82ab-135790546092 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.598675] env[62109]: DEBUG nova.network.neutron [-] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1074.686505] env[62109]: DEBUG nova.network.neutron [-] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.750743] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117170, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.959869] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1074.959869] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1074.959982] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleting the datastore file [datastore1] ad3d92ec-29a9-4f01-8117-47352c244e1e {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1074.960598] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ea797e6-22ad-430a-8c3e-bf13ed692348 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.967651] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1074.967651] env[62109]: value = "task-1117172" [ 1074.967651] env[62109]: _type = "Task" [ 1074.967651] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.978564] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117172, 'name': DeleteDatastoreFile_Task} progress is 0%. 
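The destroy sequence above (Unregistered the VM, Deleting the datastore file, DeleteDatastoreFile_Task) maps onto two vSphere calls: unregister the VM from the inventory, then delete its datastore directory. A hedged sketch of those calls, assuming `session`, `vm_ref`, and a datacenter moref `dc_ref` already exist as in the earlier sketch; the datastore path is taken from the log entry above.

```python
# Remove the VM from the vCenter inventory without touching its files.
session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

# Delete the instance's directory on the datastore and wait for the task,
# mirroring the FileManager.DeleteDatastoreFile_Task invocation above.
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task',
    session.vim.service_content.fileManager,
    name='[datastore1] ad3d92ec-29a9-4f01-8117-47352c244e1e',
    datacenter=dc_ref)
session.wait_for_task(task)
```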
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.034927] env[62109]: DEBUG oslo_concurrency.lockutils [req-b5d96c04-ce5e-4ad6-aa15-91d43d03ccfa req-e0b4fa2b-c13a-4b6d-8901-c71ab5d0ba92 service nova] Releasing lock "refresh_cache-c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1075.035055] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "refresh_cache-c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.035701] env[62109]: DEBUG nova.network.neutron [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1075.101510] env[62109]: DEBUG nova.network.neutron [-] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.190092] env[62109]: INFO nova.compute.manager [-] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Took 1.26 seconds to deallocate network for instance. [ 1075.250842] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117170, 'name': ReconfigVM_Task, 'duration_secs': 0.568802} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.253145] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 1b3d7fa7-5428-460e-ab47-49c6d38f24a5/1b3d7fa7-5428-460e-ab47-49c6d38f24a5.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1075.254231] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cab3a2b6-4e9a-4c77-9996-cb7079779bf4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.261948] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1075.261948] env[62109]: value = "task-1117173" [ 1075.261948] env[62109]: _type = "Task" [ 1075.261948] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.272068] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117173, 'name': Rename_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.298041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "bc75898d-7856-4ecb-9640-ec30538fe90f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.298041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.298041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.478716] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117172, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156418} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.479229] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1075.479560] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1075.479875] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1075.571211] env[62109]: DEBUG oslo_vmware.rw_handles [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Completed reading data from the image iterator. 
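The "Acquiring lock ... / Lock ... acquired ... waited 0.000s / Lock ... released ... held 0.000s" triplets throughout this section come from oslo.concurrency's lockutils, which Nova uses for per-instance event locks, "refresh_cache-<uuid>" locks, and the "compute_resources" lock. A minimal sketch of both usage forms; the lock names are copied from the log for illustration only.

```python
from oslo_concurrency import lockutils

# Decorator form: serializes every call to the function on the named in-process
# lock; the wrapper is what emits the "acquired by" / "released by" debug lines.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # resource-tracker bookkeeping would run here

# Context-manager form, as used around instance network-info cache refreshes.
with lockutils.lock('refresh_cache-c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea'):
    pass  # rebuild the instance_info_cache while holding the lock
```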
{{(pid=62109) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1075.571904] env[62109]: DEBUG oslo_vmware.rw_handles [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ebc8c2-75f4-6534-9cc0-e2658d0539f7/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1075.573186] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf5dc34-1221-4dfd-981c-31eeaf36c744 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.577973] env[62109]: DEBUG nova.network.neutron [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1075.585105] env[62109]: DEBUG oslo_vmware.rw_handles [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ebc8c2-75f4-6534-9cc0-e2658d0539f7/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1075.585304] env[62109]: DEBUG oslo_vmware.rw_handles [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ebc8c2-75f4-6534-9cc0-e2658d0539f7/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1075.585550] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-dfe5b69d-54cb-4ca6-be34-6d41e1c60ac1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.604678] env[62109]: INFO nova.compute.manager [-] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Took 1.03 seconds to deallocate network for instance. 
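The rw_handles entries above finish an image upload through an HttpNfcLease: once all data has been written to the leased VMDK URL, the lease state is read and, if ready, the lease is completed so vCenter finalizes the import. A hedged sketch of that release step via the generic invoke_api interface; `session` and `lease` (an HttpNfcLease moref) are assumed to already exist, and oslo.vmware's write handle performs the equivalent internally.

```python
from oslo_vmware import vim_util

# Read the lease's current state (the "Getting lease state ..." line above).
state = session.invoke_api(vim_util, 'get_object_property',
                           session.vim, lease, 'state')

if state == 'ready':
    # Report the transfer as finished and complete the lease so the imported
    # disk becomes usable ("Releasing lease ..." / HttpNfcLeaseComplete above).
    session.invoke_api(session.vim, 'HttpNfcLeaseProgress', lease, 100)
    session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
```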
[ 1075.703778] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.704080] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.704314] env[62109]: DEBUG nova.objects.instance [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Lazy-loading 'resources' on Instance uuid d9a02690-0e85-4a61-a794-e9a852ce77d7 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1075.772800] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117173, 'name': Rename_Task, 'duration_secs': 0.159843} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.773165] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1075.773442] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2bf5e98a-64c6-4a07-80d2-07577ee4bf08 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.778593] env[62109]: DEBUG oslo_vmware.rw_handles [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ebc8c2-75f4-6534-9cc0-e2658d0539f7/disk-0.vmdk. 
{{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1075.778907] env[62109]: INFO nova.virt.vmwareapi.images [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Downloaded image file data e6951ef3-e8f5-49f6-b66e-499fcd3e2d42 [ 1075.780101] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5be169b-7d33-45a4-8661-9aca5f5d0ee2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.784258] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1075.784258] env[62109]: value = "task-1117174" [ 1075.784258] env[62109]: _type = "Task" [ 1075.784258] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.802042] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8b86985-f901-4cca-9805-60c4ec4e14bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.807757] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117174, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.820413] env[62109]: DEBUG nova.network.neutron [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updating instance_info_cache with network_info: [{"id": "fad3649e-36ac-4e54-8d8b-9a77b8da03ee", "address": "fa:16:3e:6b:9d:5c", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfad3649e-36", "ovs_interfaceid": "fad3649e-36ac-4e54-8d8b-9a77b8da03ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.841413] env[62109]: INFO nova.virt.vmwareapi.images [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 
tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] The imported VM was unregistered [ 1075.844524] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Caching image {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1075.844845] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Creating directory with path [datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1075.845191] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf871996-408c-446b-ae52-cdd3f5f11fa8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.857996] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Created directory with path [datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1075.858233] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_a9e49ed8-7b64-498d-96cd-fee73609c152/OSTACK_IMG_a9e49ed8-7b64-498d-96cd-fee73609c152.vmdk to [datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42.vmdk. {{(pid=62109) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1075.858537] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-0d4fa5eb-e096-4c8d-a4cd-f642072bf57c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.870246] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1075.870246] env[62109]: value = "task-1117176" [ 1075.870246] env[62109]: _type = "Task" [ 1075.870246] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.879992] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117176, 'name': MoveVirtualDisk_Task} progress is 0%. 
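The ds_util entries above create the devstack-image-cache_base/<image-id> directory and then move the imported VMDK into the image cache with a MoveVirtualDisk_Task. A hedged sketch of the two underlying VIM calls, assuming `session` and a datacenter moref `dc_ref` are already available; the datastore paths are copied from the log entries above.

```python
# Create the cache directory on the datastore (FileManager.MakeDirectory above).
session.invoke_api(
    session.vim, 'MakeDirectory',
    session.vim.service_content.fileManager,
    name='[datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42',
    datacenter=dc_ref,
    createParentDirectories=True)

# Move the imported disk into the cache (VirtualDiskManager.MoveVirtualDisk_Task
# above), then wait for the asynchronous task to finish.
task = session.invoke_api(
    session.vim, 'MoveVirtualDisk_Task',
    session.vim.service_content.virtualDiskManager,
    sourceName='[datastore1] OSTACK_IMG_a9e49ed8-7b64-498d-96cd-fee73609c152/'
               'OSTACK_IMG_a9e49ed8-7b64-498d-96cd-fee73609c152.vmdk',
    sourceDatacenter=dc_ref,
    destName='[datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42/'
             'e6951ef3-e8f5-49f6-b66e-499fcd3e2d42.vmdk',
    destDatacenter=dc_ref,
    force=False)
session.wait_for_task(task)
```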
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.937909] env[62109]: DEBUG nova.compute.manager [req-bc80b3d0-3d97-4616-95ef-eb20d995e4cc req-11c81bc5-c7e7-46fe-ac31-3f0bdfc82af8 service nova] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Received event network-vif-deleted-953462ff-1e7c-459f-aef0-b8cd1de48900 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1076.112740] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.299117] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117174, 'name': PowerOnVM_Task} progress is 90%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.323978] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "refresh_cache-c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1076.324497] env[62109]: DEBUG nova.compute.manager [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Instance network_info: |[{"id": "fad3649e-36ac-4e54-8d8b-9a77b8da03ee", "address": "fa:16:3e:6b:9d:5c", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfad3649e-36", "ovs_interfaceid": "fad3649e-36ac-4e54-8d8b-9a77b8da03ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1076.325158] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:9d:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fad3649e-36ac-4e54-8d8b-9a77b8da03ee', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1076.338539] env[62109]: DEBUG oslo.service.loopingcall [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1076.343359] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1076.344051] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebdf2c28-6c1b-4d80-94e3-6e9af71aa9c4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.380680] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.380680] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.380680] env[62109]: DEBUG nova.network.neutron [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1076.398884] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117176, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.403669] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1076.403669] env[62109]: value = "task-1117177" [ 1076.403669] env[62109]: _type = "Task" [ 1076.403669] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.416232] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117177, 'name': CreateVM_Task} progress is 0%. 
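The "Instance VIF info" and CreateVM_Task entries above describe the vmxnet3 NIC the driver adds to the new VM, backed by an NSX opaque network. The sketch below shows one plausible way such a device spec is assembled with the suds client factory exposed by the oslo.vmware session; attribute names follow the vSphere API, the MAC address and logical-switch ID are copied from the log, the negative device key is a conventional temporary value, and the surrounding VirtualMachineConfigSpec/CreateVM_Task plumbing is omitted. This is an illustration, not Nova's vm_util code.

```python
# Assumes an existing oslo.vmware `session` (see the first sketch in this section).
client_factory = session.vim.client.factory

nic = client_factory.create('ns0:VirtualVmxnet3')
nic.key = -47                       # temporary negative key for a device being added
nic.addressType = 'manual'
nic.macAddress = 'fa:16:3e:6b:9d:5c'

backing = client_factory.create('ns0:VirtualEthernetCardOpaqueNetworkBackingInfo')
backing.opaqueNetworkId = 'a9abd00f-2cea-40f8-9804-a56b6431192d'   # nsx-logical-switch-id above
backing.opaqueNetworkType = 'nsx.LogicalSwitch'
nic.backing = backing

device_change = client_factory.create('ns0:VirtualDeviceConfigSpec')
device_change.operation = 'add'
device_change.device = nic
# device_change would then be appended to the VirtualMachineConfigSpec.deviceChange
# list passed to Folder.CreateVM_Task.
```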
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.449867] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61edaafc-8b09-4bc9-8b8b-52b40515451c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.460014] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72939b5e-8be2-48eb-b5b4-edbf396139a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.505820] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1036dce3-1162-4a83-b779-10d99b4d5cb1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.517207] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba79052c-36f2-4cc6-b1e1-1b83bc728b55 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.541341] env[62109]: DEBUG nova.compute.provider_tree [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1076.545431] env[62109]: DEBUG nova.virt.hardware [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1076.545721] env[62109]: DEBUG nova.virt.hardware [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1076.545899] env[62109]: DEBUG nova.virt.hardware [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1076.546125] env[62109]: DEBUG nova.virt.hardware [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1076.546377] env[62109]: DEBUG nova.virt.hardware [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1076.546590] env[62109]: DEBUG nova.virt.hardware [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1076.546931] env[62109]: DEBUG nova.virt.hardware [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1076.547232] env[62109]: DEBUG nova.virt.hardware [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1076.547537] env[62109]: DEBUG nova.virt.hardware [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1076.547800] env[62109]: DEBUG nova.virt.hardware [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1076.548080] env[62109]: DEBUG nova.virt.hardware [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1076.549199] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9856437d-5402-48cb-b070-15944c6690a4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.562848] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd6a2ba-2a8f-49e3-868b-17742731445f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.584426] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:b4:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61b8f0db-488e-42d7-bf6c-6c1665cd5616', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13c402cb-03d3-437a-b0c0-ef0bb1f76185', 
'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1076.596209] env[62109]: DEBUG oslo.service.loopingcall [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1076.596209] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1076.596209] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18e12557-1a3b-45bd-90ab-9a4be6c9532e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.624847] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1076.624847] env[62109]: value = "task-1117178" [ 1076.624847] env[62109]: _type = "Task" [ 1076.624847] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.636412] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117178, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.800724] env[62109]: DEBUG oslo_vmware.api [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117174, 'name': PowerOnVM_Task, 'duration_secs': 0.717107} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.801080] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1076.801359] env[62109]: INFO nova.compute.manager [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Took 9.60 seconds to spawn the instance on the hypervisor. [ 1076.801686] env[62109]: DEBUG nova.compute.manager [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1076.802568] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420960bc-c5d3-47fa-abcc-9659746dde02 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.898036] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117176, 'name': MoveVirtualDisk_Task} progress is 35%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.918410] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117177, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.054575] env[62109]: DEBUG nova.scheduler.client.report [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1077.137361] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117178, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.325873] env[62109]: DEBUG nova.network.neutron [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance_info_cache with network_info: [{"id": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "address": "fa:16:3e:9b:4f:08", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b33420f-fd", "ovs_interfaceid": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.327408] env[62109]: INFO nova.compute.manager [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Took 18.80 seconds to build instance. [ 1077.396332] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117176, 'name': MoveVirtualDisk_Task} progress is 54%. 
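The scheduler report-client entry above compares the compute node's inventory against what Placement already has. Placement exposes usable capacity per resource class as (total - reserved) * allocation_ratio; a quick worked check of the figures logged above:

```python
# Inventory values copied from the log entry above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for resource_class, inv in inventory.items():
    # Placement's usable capacity: (total - reserved) * allocation_ratio.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(resource_class, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```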
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.420073] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117177, 'name': CreateVM_Task, 'duration_secs': 0.56814} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.420266] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1077.421820] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244530', 'volume_id': '35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'name': 'volume-35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea', 'attached_at': '', 'detached_at': '', 'volume_id': '35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'serial': '35d2eab5-8592-4a6e-b3bb-80c16e77808d'}, 'device_type': None, 'delete_on_termination': True, 'mount_device': '/dev/sda', 'attachment_id': '06d4260e-5285-4e0b-a2bc-b1222f9126c6', 'guest_format': None, 'boot_index': 0, 'volume_type': None}], 'swap': None} {{(pid=62109) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1077.421820] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Root volume attach. 
Driver type: vmdk {{(pid=62109) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1077.423037] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8380a35-0dac-4f35-9e05-e7aad2fe45b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.436344] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01efe016-25bc-41da-b362-400801e8e26a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.445586] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9a5987-df8f-4279-b98c-6bb70af72201 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.455258] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-5947f25a-257f-4e7a-8d04-2bb2fa50c4d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.467521] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1077.467521] env[62109]: value = "task-1117179" [ 1077.467521] env[62109]: _type = "Task" [ 1077.467521] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.478917] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117179, 'name': RelocateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.560650] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.856s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.563261] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.451s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.563621] env[62109]: DEBUG nova.objects.instance [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Lazy-loading 'resources' on Instance uuid 42e26a9c-fdba-4d5c-86b1-146fce2e6b23 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.595789] env[62109]: INFO nova.scheduler.client.report [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Deleted allocations for instance d9a02690-0e85-4a61-a794-e9a852ce77d7 [ 1077.638450] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117178, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.830257] env[62109]: DEBUG oslo_concurrency.lockutils [None req-107119ae-08df-44fe-b192-45071200fa6b tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.310s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.831045] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1077.898943] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117176, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.981921] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117179, 'name': RelocateVM_Task} progress is 42%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.112576] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5b45e07c-ab52-4fc7-93a3-5f6169d18cc8 tempest-ServerRescueTestJSONUnderV235-1661278404 tempest-ServerRescueTestJSONUnderV235-1661278404-project-member] Lock "d9a02690-0e85-4a61-a794-e9a852ce77d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.328s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.152405] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117178, 'name': CreateVM_Task, 'duration_secs': 1.502167} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.154250] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1078.156164] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.156571] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.159705] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1078.159705] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67a8b785-4aae-4f19-a915-d2f82d7b1d6f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.167522] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1078.167522] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526ae784-4c9c-023a-220d-c6468b69aead" [ 1078.167522] env[62109]: _type = "Task" [ 1078.167522] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.183471] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526ae784-4c9c-023a-220d-c6468b69aead, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.287379] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4a598a-70aa-4bbb-a5ec-b573791d67f4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.301486] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7d8716-3363-4375-bbea-9a09b82e989e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.337047] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57343468-2f93-4abc-8b99-8340bd9fb68a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.345030] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07c7bd2-8c11-4ffe-8225-48ec95785be6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.353745] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba69dd7-a411-4f13-8d24-0a6f76ffa663 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.361387] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a3e4fb-7d3c-4272-b5e0-f86b4010f597 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.374801] env[62109]: DEBUG nova.compute.provider_tree [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.397165] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117176, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.495591] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117179, 'name': RelocateVM_Task} progress is 56%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.680435] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526ae784-4c9c-023a-220d-c6468b69aead, 'name': SearchDatastore_Task, 'duration_secs': 0.077947} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.680435] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1078.681887] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1078.681887] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.681887] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.681887] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1078.681887] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-518a5d53-7cc3-4bb3-a118-405928ab582b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.694464] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1078.694464] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1078.695160] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aebba320-7bd4-44a1-ae19-2194738be202 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.703474] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1078.703474] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521cf00e-c2d0-b1e4-5677-1cd1184e922d" [ 1078.703474] env[62109]: _type = "Task" [ 1078.703474] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.715700] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521cf00e-c2d0-b1e4-5677-1cd1184e922d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.877173] env[62109]: DEBUG nova.scheduler.client.report [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1078.917463] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117176, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.5796} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.917833] env[62109]: INFO nova.virt.vmwareapi.ds_util [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_a9e49ed8-7b64-498d-96cd-fee73609c152/OSTACK_IMG_a9e49ed8-7b64-498d-96cd-fee73609c152.vmdk to [datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42.vmdk. 
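A note on the recurring task-polling pattern above: CreateVM_Task, MoveVirtualDisk_Task and SearchDatastore_Task are all driven the same way by oslo.vmware. A vSphere method is invoked through the API session, a Task managed-object reference comes back, and wait_for_task() (api.py:397) polls it, which is what emits the repeated "Task: {...} progress is N%" lines (_poll_task, api.py:434). A minimal caller-side sketch, assuming a hypothetical vCenter endpoint, credentials and VM reference that are not taken from this log:

    # Illustrative sketch only; the host, credentials and chosen VM are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',         # hypothetical vCenter
        api_retry_count=10, task_poll_interval=0.5)  # poll interval drives the progress lines

    # Pick some VirtualMachine managed-object reference (any moref is handled the same way).
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 1, ['name'])
    vm_ref = result.objects[0].obj

    # Invoke a method that returns a Task, then block on it. wait_for_task()
    # re-reads the task's 'info' property in a loop and raises if the task errors out.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task_ref)
    print(task_info.state)  # 'success' once vCenter reports completion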
[ 1078.918051] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Cleaning up location [datastore1] OSTACK_IMG_a9e49ed8-7b64-498d-96cd-fee73609c152 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1078.918250] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_a9e49ed8-7b64-498d-96cd-fee73609c152 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1078.918557] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ba227b5-3ac1-4b6c-a5c8-d2db3e480f4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.930229] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1078.930229] env[62109]: value = "task-1117180" [ 1078.930229] env[62109]: _type = "Task" [ 1078.930229] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.944775] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117180, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.988774] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117179, 'name': RelocateVM_Task} progress is 71%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.217707] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521cf00e-c2d0-b1e4-5677-1cd1184e922d, 'name': SearchDatastore_Task, 'duration_secs': 0.013033} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.218741] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bda2756-3d63-48b5-88d4-4b92576cf353 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.227311] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1079.227311] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b5a931-7171-ae42-d9ff-3d7926da6fcc" [ 1079.227311] env[62109]: _type = "Task" [ 1079.227311] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.244710] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b5a931-7171-ae42-d9ff-3d7926da6fcc, 'name': SearchDatastore_Task, 'duration_secs': 0.012236} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.244956] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.245786] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] ad3d92ec-29a9-4f01-8117-47352c244e1e/ad3d92ec-29a9-4f01-8117-47352c244e1e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1079.245786] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b05f262-b9ec-4072-a145-f248c82b951e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.259253] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1079.259253] env[62109]: value = "task-1117181" [ 1079.259253] env[62109]: _type = "Task" [ 1079.259253] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.268834] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117181, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.307245] env[62109]: DEBUG nova.compute.manager [req-87c2514e-c3ca-4851-af01-25a544b63e57 req-95a50fd3-42e8-49d3-a388-d37b3e6c9679 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Received event network-changed-a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1079.307245] env[62109]: DEBUG nova.compute.manager [req-87c2514e-c3ca-4851-af01-25a544b63e57 req-95a50fd3-42e8-49d3-a388-d37b3e6c9679 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Refreshing instance network info cache due to event network-changed-a04abfce-a9e7-413a-94d6-d14ed8f205cb. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1079.311498] env[62109]: DEBUG oslo_concurrency.lockutils [req-87c2514e-c3ca-4851-af01-25a544b63e57 req-95a50fd3-42e8-49d3-a388-d37b3e6c9679 service nova] Acquiring lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.311911] env[62109]: DEBUG oslo_concurrency.lockutils [req-87c2514e-c3ca-4851-af01-25a544b63e57 req-95a50fd3-42e8-49d3-a388-d37b3e6c9679 service nova] Acquired lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.312342] env[62109]: DEBUG nova.network.neutron [req-87c2514e-c3ca-4851-af01-25a544b63e57 req-95a50fd3-42e8-49d3-a388-d37b3e6c9679 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Refreshing network info cache for port a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1079.385311] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.821s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.421648] env[62109]: INFO nova.scheduler.client.report [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Deleted allocations for instance 42e26a9c-fdba-4d5c-86b1-146fce2e6b23 [ 1079.448025] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117180, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043747} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.448025] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1079.448025] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.448025] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42.vmdk to [datastore1] 0f7445fa-c48e-4e79-a01a-1f8f70072de4/0f7445fa-c48e-4e79-a01a-1f8f70072de4.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1079.448025] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21c39cfb-88db-429f-96bd-03e1cd02e545 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.461046] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1079.461046] env[62109]: value = "task-1117182" [ 1079.461046] env[62109]: _type = "Task" [ 1079.461046] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.477436] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117182, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.493698] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117179, 'name': RelocateVM_Task} progress is 84%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.582300] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feace52a-24c7-4e20-891f-1955aafed08b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.613045] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73137ff7-3fd9-4b82-aa8d-3c3ea37a3c80 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.621355] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance 'bc75898d-7856-4ecb-9640-ec30538fe90f' progress to 83 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1079.770714] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117181, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.931796] env[62109]: DEBUG oslo_concurrency.lockutils [None req-88cc8a86-5cef-4c3a-8d2c-04a9e4389802 tempest-ServerShowV254Test-284853901 tempest-ServerShowV254Test-284853901-project-member] Lock "42e26a9c-fdba-4d5c-86b1-146fce2e6b23" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.567s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.976723] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117182, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.992290] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117179, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.131953] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1080.132309] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc754b5a-ac22-4092-98da-a8f8ef4fe487 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.146168] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1080.146168] env[62109]: value = "task-1117183" [ 1080.146168] env[62109]: _type = "Task" [ 1080.146168] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.160869] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117183, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.278693] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117181, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542315} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.279154] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] ad3d92ec-29a9-4f01-8117-47352c244e1e/ad3d92ec-29a9-4f01-8117-47352c244e1e.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1080.279488] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1080.280683] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29f867d1-81e2-4735-8bb8-b080b4c9738a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.292092] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1080.292092] env[62109]: value = "task-1117184" [ 1080.292092] env[62109]: _type = "Task" [ 1080.292092] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.306748] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117184, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.365034] env[62109]: DEBUG nova.network.neutron [req-87c2514e-c3ca-4851-af01-25a544b63e57 req-95a50fd3-42e8-49d3-a388-d37b3e6c9679 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updated VIF entry in instance network info cache for port a04abfce-a9e7-413a-94d6-d14ed8f205cb. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1080.365034] env[62109]: DEBUG nova.network.neutron [req-87c2514e-c3ca-4851-af01-25a544b63e57 req-95a50fd3-42e8-49d3-a388-d37b3e6c9679 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updating instance_info_cache with network_info: [{"id": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "address": "fa:16:3e:53:c9:6c", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa04abfce-a9", "ovs_interfaceid": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.473570] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117182, 'name': CopyVirtualDisk_Task} progress is 29%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.490144] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117179, 'name': RelocateVM_Task} progress is 97%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.662081] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117183, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.780338] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Acquiring lock "5308edf2-155c-452f-9240-45ff444826aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.780338] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Lock "5308edf2-155c-452f-9240-45ff444826aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.807229] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117184, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089286} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.807229] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1080.807229] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a1ae36-8f07-46dc-8dde-31e3c73d9b11 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.835473] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] ad3d92ec-29a9-4f01-8117-47352c244e1e/ad3d92ec-29a9-4f01-8117-47352c244e1e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1080.836491] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7658e7a7-8a7c-4e95-ace7-731ec8185be4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.860532] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1080.860532] env[62109]: value = "task-1117185" [ 1080.860532] env[62109]: _type = "Task" [ 1080.860532] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.868191] env[62109]: DEBUG oslo_concurrency.lockutils [req-87c2514e-c3ca-4851-af01-25a544b63e57 req-95a50fd3-42e8-49d3-a388-d37b3e6c9679 service nova] Releasing lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.872623] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117185, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.972797] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117182, 'name': CopyVirtualDisk_Task} progress is 49%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.987949] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117179, 'name': RelocateVM_Task} progress is 98%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.164342] env[62109]: DEBUG oslo_vmware.api [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117183, 'name': PowerOnVM_Task, 'duration_secs': 0.629987} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.164732] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1081.164947] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99bd3299-dba9-4a4e-acd9-22b17d09bbc5 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance 'bc75898d-7856-4ecb-9640-ec30538fe90f' progress to 100 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1081.284647] env[62109]: DEBUG nova.compute.manager [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1081.373106] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117185, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.472998] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117182, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.488738] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117179, 'name': RelocateVM_Task} progress is 98%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.809603] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.809997] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.811841] env[62109]: INFO nova.compute.claims [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1081.872015] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117185, 'name': ReconfigVM_Task, 'duration_secs': 0.596822} completed successfully. 
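The "Acquiring lock ... / Lock ... acquired by ... :: waited N.NNNs / ... released ... :: held N.NNNs" pairs above, for compute_resources, the per-instance refresh_cache-<uuid> locks and the image-cache datastore paths, are emitted by oslo.concurrency's in-process locks, which Nova wraps with a 'nova-' prefix. A rough sketch of how such lock names are declared and used, built on the public lockutils helpers rather than Nova's internal wrappers:

    # Rough sketch, not Nova's exact code; the decorator and context manager
    # below are the public oslo.concurrency helpers.
    from oslo_concurrency import lockutils

    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources', fair=True)
    def update_usage():
        # Critical section: only one thread at a time may mutate the resource
        # tracker's state; the wrapper logs the waited/held durations seen above.
        pass

    # The same primitive as a context manager, as used for the per-instance
    # "refresh_cache-<uuid>" locks (the uuid here is a placeholder):
    with lockutils.lock('refresh_cache-<uuid>', lock_file_prefix='nova-'):
        pass  # refresh and store the instance's network info cache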
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.872297] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Reconfigured VM instance instance-00000065 to attach disk [datastore1] ad3d92ec-29a9-4f01-8117-47352c244e1e/ad3d92ec-29a9-4f01-8117-47352c244e1e.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1081.872961] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b582f071-94da-47a5-89b1-26af68354d7d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.880737] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1081.880737] env[62109]: value = "task-1117186" [ 1081.880737] env[62109]: _type = "Task" [ 1081.880737] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.890434] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117186, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.972605] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117182, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.990011] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117179, 'name': RelocateVM_Task, 'duration_secs': 4.511255} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.990396] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Volume attach. 
Driver type: vmdk {{(pid=62109) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1081.990615] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244530', 'volume_id': '35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'name': 'volume-35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea', 'attached_at': '', 'detached_at': '', 'volume_id': '35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'serial': '35d2eab5-8592-4a6e-b3bb-80c16e77808d'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1081.991466] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31db0313-3c40-4318-95e5-2bd1def853ad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.008680] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfea3eb-18e4-4c5b-a02d-8a2141ca9c4c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.034191] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] volume-35d2eab5-8592-4a6e-b3bb-80c16e77808d/volume-35d2eab5-8592-4a6e-b3bb-80c16e77808d.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1082.034562] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fba5a5fa-539a-4311-887e-2556fae02af8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.056428] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1082.056428] env[62109]: value = "task-1117187" [ 1082.056428] env[62109]: _type = "Task" [ 1082.056428] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.066117] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117187, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.391711] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117186, 'name': Rename_Task, 'duration_secs': 0.268403} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.391920] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1082.392181] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-145f9516-e764-4c31-9a66-36c47e0c39fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.399394] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1082.399394] env[62109]: value = "task-1117188" [ 1082.399394] env[62109]: _type = "Task" [ 1082.399394] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.411177] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117188, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.472573] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117182, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.689985} completed successfully. 
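The CopyVirtualDisk_Task records above (for example task-1117182, which copies a cached image VMDK into the instance directory before a ReconfigVM_Task attaches it) come down to a single VirtualDiskManager call that is then polled like any other task. A hedged sketch of that call; the session is the hypothetical one from the earlier sketch, and dc_ref plus both datastore paths are placeholders rather than values from this log:

    # Illustrative sketch only; 'session' is the hypothetical session from the
    # earlier sketch, and dc_ref and the paths are placeholders.
    dc_ref = ...  # Datacenter moref, obtained elsewhere (e.g. via vim_util.get_objects)
    disk_mgr = session.vim.service_content.virtualDiskManager
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore1] <instance-uuid>/<instance-uuid>.vmdk',
        destDatacenter=dc_ref)
    session.wait_for_task(copy_task)  # polled exactly like the tasks above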
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.472885] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42/e6951ef3-e8f5-49f6-b66e-499fcd3e2d42.vmdk to [datastore1] 0f7445fa-c48e-4e79-a01a-1f8f70072de4/0f7445fa-c48e-4e79-a01a-1f8f70072de4.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1082.473682] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028fe4ee-86a2-4b36-bd77-ea7c6ac3cbb2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.495513] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 0f7445fa-c48e-4e79-a01a-1f8f70072de4/0f7445fa-c48e-4e79-a01a-1f8f70072de4.vmdk or device None with type streamOptimized {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1082.495933] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-324ec8e7-6817-4fb4-ad87-58ef7d6f5678 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.515466] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1082.515466] env[62109]: value = "task-1117189" [ 1082.515466] env[62109]: _type = "Task" [ 1082.515466] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.523385] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117189, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.567089] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117187, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.909094] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117188, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.962579] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd767e5-70eb-4ccd-b1bd-0b7619c0b61a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.970663] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15cdfcf3-62ca-4da6-8fad-e1b240a8b6c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.003794] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec10af3-6cb2-4c6b-9abe-9117acb3f711 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.012943] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6835257-0c5b-425f-a029-40e0e838c215 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.025197] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117189, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.033060] env[62109]: DEBUG nova.compute.provider_tree [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1083.068458] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117187, 'name': ReconfigVM_Task, 'duration_secs': 0.965609} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.068778] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Reconfigured VM instance instance-00000067 to attach disk [datastore1] volume-35d2eab5-8592-4a6e-b3bb-80c16e77808d/volume-35d2eab5-8592-4a6e-b3bb-80c16e77808d.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1083.073740] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a9df7bc-2847-425e-9c91-272237bcb95f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.089539] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1083.089539] env[62109]: value = "task-1117190" [ 1083.089539] env[62109]: _type = "Task" [ 1083.089539] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.103045] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117190, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.410906] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117188, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.527135] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117189, 'name': ReconfigVM_Task, 'duration_secs': 0.995211} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.528307] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 0f7445fa-c48e-4e79-a01a-1f8f70072de4/0f7445fa-c48e-4e79-a01a-1f8f70072de4.vmdk or device None with type streamOptimized {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1083.529087] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da269d8f-8b7d-4f6e-a5b2-6e66ca18cdfb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.536032] env[62109]: DEBUG nova.scheduler.client.report [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1083.541998] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1083.541998] env[62109]: value = "task-1117191" [ 1083.541998] env[62109]: _type = "Task" [ 1083.541998] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.553162] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117191, 'name': Rename_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.602562] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117190, 'name': ReconfigVM_Task, 'duration_secs': 0.326739} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.602978] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244530', 'volume_id': '35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'name': 'volume-35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea', 'attached_at': '', 'detached_at': '', 'volume_id': '35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'serial': '35d2eab5-8592-4a6e-b3bb-80c16e77808d'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1083.603546] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0bb1f2f8-4de4-46f3-a917-72475ec0c1d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.611132] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1083.611132] env[62109]: value = "task-1117192" [ 1083.611132] env[62109]: _type = "Task" [ 1083.611132] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.622039] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117192, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.911498] env[62109]: DEBUG oslo_vmware.api [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117188, 'name': PowerOnVM_Task, 'duration_secs': 1.211232} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.911856] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1083.912048] env[62109]: DEBUG nova.compute.manager [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1083.912853] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b9cea9-0a2c-48d9-ba37-afc4f16498df {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.041897] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.231s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.041897] env[62109]: DEBUG nova.compute.manager [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1084.053609] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117191, 'name': Rename_Task, 'duration_secs': 0.192112} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.053874] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1084.055028] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a06c54c-25bb-4d36-9c6b-850d0fadb455 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.061968] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1084.061968] env[62109]: value = "task-1117193" [ 1084.061968] env[62109]: _type = "Task" [ 1084.061968] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.071989] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117193, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.117455] env[62109]: DEBUG nova.network.neutron [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Port 8b33420f-fd6d-43fc-b4e1-141768c6024b binding to destination host cpu-1 is already ACTIVE {{(pid=62109) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1084.117716] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.117873] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.118090] env[62109]: DEBUG nova.network.neutron [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1084.125075] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117192, 'name': Rename_Task, 'duration_secs': 0.193964} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.125338] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1084.125579] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-962ebafa-e555-447d-888b-59a17c322086 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.134765] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1084.134765] env[62109]: value = "task-1117194" [ 1084.134765] env[62109]: _type = "Task" [ 1084.134765] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.143637] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117194, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.429275] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.429440] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.429763] env[62109]: DEBUG nova.objects.instance [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1084.550189] env[62109]: DEBUG nova.compute.utils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1084.551913] env[62109]: DEBUG nova.compute.manager [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1084.552129] env[62109]: DEBUG nova.network.neutron [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1084.574118] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117193, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.598676] env[62109]: DEBUG nova.policy [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ed2edd161d54411b1ece8859a62167b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b4dc2c367ee14732b9f6666688a71ad9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1084.645339] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117194, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.969581] env[62109]: DEBUG nova.network.neutron [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance_info_cache with network_info: [{"id": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "address": "fa:16:3e:9b:4f:08", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b33420f-fd", "ovs_interfaceid": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.012462] env[62109]: DEBUG nova.network.neutron [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Successfully created port: bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1085.055708] env[62109]: DEBUG nova.compute.manager [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 
5308edf2-155c-452f-9240-45ff444826aa] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1085.077054] env[62109]: DEBUG oslo_vmware.api [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117193, 'name': PowerOnVM_Task, 'duration_secs': 0.623598} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.077054] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1085.150939] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117194, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.192092] env[62109]: DEBUG nova.compute.manager [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1085.192092] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba1e39d-015d-4382-88f8-0f134925b471 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.437900] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ddb10164-23d6-44b4-a8a6-163ef320a103 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.472811] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.646417] env[62109]: DEBUG oslo_vmware.api [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117194, 'name': PowerOnVM_Task, 'duration_secs': 1.358605} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.646718] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1085.646932] env[62109]: INFO nova.compute.manager [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Took 11.08 seconds to spawn the instance on the hypervisor. [ 1085.647138] env[62109]: DEBUG nova.compute.manager [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1085.647984] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca98fe19-ab1f-40df-85f2-36d763da9573 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.710019] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a1a53f1-9098-4ead-b7e5-409bcf5729dc tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 27.436s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.976499] env[62109]: DEBUG nova.compute.manager [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62109) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:897}} [ 1086.067935] env[62109]: DEBUG nova.compute.manager [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1086.093376] env[62109]: DEBUG nova.virt.hardware [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1086.093634] env[62109]: DEBUG nova.virt.hardware [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1086.093937] env[62109]: DEBUG nova.virt.hardware [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1086.093980] env[62109]: DEBUG nova.virt.hardware [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1086.094134] env[62109]: DEBUG nova.virt.hardware [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1086.094289] env[62109]: DEBUG nova.virt.hardware [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1086.094505] env[62109]: DEBUG nova.virt.hardware [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1086.094669] env[62109]: DEBUG nova.virt.hardware [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1086.094839] env[62109]: DEBUG nova.virt.hardware [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1086.095017] env[62109]: DEBUG nova.virt.hardware [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1086.095218] env[62109]: DEBUG nova.virt.hardware [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1086.095620] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "ad3d92ec-29a9-4f01-8117-47352c244e1e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.095841] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "ad3d92ec-29a9-4f01-8117-47352c244e1e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.096050] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "ad3d92ec-29a9-4f01-8117-47352c244e1e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.096240] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "ad3d92ec-29a9-4f01-8117-47352c244e1e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.096414] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "ad3d92ec-29a9-4f01-8117-47352c244e1e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.098636] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8774c15b-d68a-4167-b7c7-26f3f87310dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.101993] env[62109]: INFO nova.compute.manager [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Terminating instance [ 1086.104087] env[62109]: DEBUG nova.compute.manager [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1086.104316] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1086.105052] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfb1335-e47c-4cf6-b557-117c4f7ef03e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.111166] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf67dce3-f8dd-4ec4-be05-dc777c244bae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.117120] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1086.117640] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d764481-acc8-447c-8994-637b786dcbb6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.129614] env[62109]: DEBUG oslo_vmware.api [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1086.129614] env[62109]: value = "task-1117195" [ 1086.129614] env[62109]: _type = "Task" [ 1086.129614] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.141288] env[62109]: DEBUG oslo_vmware.api [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117195, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.165645] env[62109]: INFO nova.compute.manager [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Took 24.98 seconds to build instance. 
[ 1086.423427] env[62109]: DEBUG nova.compute.manager [req-b5ac257b-2c36-4835-9921-80c520d5aa12 req-f126e043-fd5d-43e2-b229-bf38c0a68ae2 service nova] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Received event network-vif-plugged-bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1086.423713] env[62109]: DEBUG oslo_concurrency.lockutils [req-b5ac257b-2c36-4835-9921-80c520d5aa12 req-f126e043-fd5d-43e2-b229-bf38c0a68ae2 service nova] Acquiring lock "5308edf2-155c-452f-9240-45ff444826aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.423931] env[62109]: DEBUG oslo_concurrency.lockutils [req-b5ac257b-2c36-4835-9921-80c520d5aa12 req-f126e043-fd5d-43e2-b229-bf38c0a68ae2 service nova] Lock "5308edf2-155c-452f-9240-45ff444826aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.424197] env[62109]: DEBUG oslo_concurrency.lockutils [req-b5ac257b-2c36-4835-9921-80c520d5aa12 req-f126e043-fd5d-43e2-b229-bf38c0a68ae2 service nova] Lock "5308edf2-155c-452f-9240-45ff444826aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.424415] env[62109]: DEBUG nova.compute.manager [req-b5ac257b-2c36-4835-9921-80c520d5aa12 req-f126e043-fd5d-43e2-b229-bf38c0a68ae2 service nova] [instance: 5308edf2-155c-452f-9240-45ff444826aa] No waiting events found dispatching network-vif-plugged-bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1086.424637] env[62109]: WARNING nova.compute.manager [req-b5ac257b-2c36-4835-9921-80c520d5aa12 req-f126e043-fd5d-43e2-b229-bf38c0a68ae2 service nova] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Received unexpected event network-vif-plugged-bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8 for instance with vm_state building and task_state spawning. [ 1086.518576] env[62109]: DEBUG nova.network.neutron [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Successfully updated port: bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1086.639631] env[62109]: DEBUG oslo_vmware.api [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117195, 'name': PowerOffVM_Task, 'duration_secs': 0.31912} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.640009] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.640272] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1086.640502] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1086.640760] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.640901] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1086.641028] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Rebuilding the list of instances to heal {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1086.642234] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2feedc18-9c65-4169-8797-bd642b5e3118 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.667564] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c0eec397-caf4-4299-a499-aba1c8800b42 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.491s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.711815] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1086.711972] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1086.712073] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 
tempest-ServerActionsTestJSON-811768397-project-member] Deleting the datastore file [datastore1] ad3d92ec-29a9-4f01-8117-47352c244e1e {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1086.712330] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad37b4eb-55bb-4204-9fe3-eb65cf182375 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.719856] env[62109]: DEBUG oslo_vmware.api [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1086.719856] env[62109]: value = "task-1117197" [ 1086.719856] env[62109]: _type = "Task" [ 1086.719856] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.728436] env[62109]: DEBUG oslo_vmware.api [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117197, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.027080] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Acquiring lock "refresh_cache-5308edf2-155c-452f-9240-45ff444826aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1087.027080] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Acquired lock "refresh_cache-5308edf2-155c-452f-9240-45ff444826aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.027080] env[62109]: DEBUG nova.network.neutron [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1087.079307] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.079632] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.147621] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Skipping network cache 
update for instance because it is being deleted. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1087.147778] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1087.182192] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1087.182532] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquired lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.182841] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Forcefully refreshing network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1087.183185] env[62109]: DEBUG nova.objects.instance [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lazy-loading 'info_cache' on Instance uuid 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1087.231204] env[62109]: DEBUG oslo_vmware.api [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117197, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145977} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.231821] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.232172] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1087.232256] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1087.232520] env[62109]: INFO nova.compute.manager [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1087.232765] env[62109]: DEBUG oslo.service.loopingcall [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1087.232934] env[62109]: DEBUG nova.compute.manager [-] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1087.233058] env[62109]: DEBUG nova.network.neutron [-] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1087.567186] env[62109]: DEBUG nova.network.neutron [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1087.582984] env[62109]: DEBUG nova.objects.instance [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'migration_context' on Instance uuid bc75898d-7856-4ecb-9640-ec30538fe90f {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1087.604096] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2b8c65-2db7-4fd2-b34c-633c494645b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.611479] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-48504c9e-813b-4ab1-be26-7de3c360b5ad tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Suspending the VM {{(pid=62109) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 1087.611740] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-89798fe9-61df-4b60-b18c-077bfb4e2736 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.619092] env[62109]: DEBUG oslo_vmware.api [None req-48504c9e-813b-4ab1-be26-7de3c360b5ad tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1087.619092] env[62109]: value = "task-1117198" [ 1087.619092] env[62109]: _type = "Task" [ 1087.619092] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.633838] env[62109]: DEBUG oslo_vmware.api [None req-48504c9e-813b-4ab1-be26-7de3c360b5ad tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117198, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.739248] env[62109]: DEBUG nova.network.neutron [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Updating instance_info_cache with network_info: [{"id": "bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8", "address": "fa:16:3e:f2:0b:8b", "network": {"id": "544b3411-2223-444c-bdb4-6bbdd559bba6", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-508095818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dc2c367ee14732b9f6666688a71ad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdccf7e2-0b", "ovs_interfaceid": "bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.010284] env[62109]: DEBUG nova.network.neutron [-] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.130115] env[62109]: DEBUG oslo_vmware.api [None req-48504c9e-813b-4ab1-be26-7de3c360b5ad tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117198, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.233146] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03217319-a332-4855-8b09-01ed9f4ee21b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.241157] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e88c701-c37e-4c34-89a4-aa85d62834f1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.244833] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Releasing lock "refresh_cache-5308edf2-155c-452f-9240-45ff444826aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1088.245188] env[62109]: DEBUG nova.compute.manager [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Instance network_info: |[{"id": "bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8", "address": "fa:16:3e:f2:0b:8b", "network": {"id": "544b3411-2223-444c-bdb4-6bbdd559bba6", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-508095818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dc2c367ee14732b9f6666688a71ad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdccf7e2-0b", "ovs_interfaceid": "bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1088.245646] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:0b:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52f465cb-7418-4172-bd7d-aec00abeb692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1088.253791] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Creating folder: Project 
(b4dc2c367ee14732b9f6666688a71ad9). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1088.254844] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4094ebff-a2fd-4641-8b57-9cad24612817 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.282528] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ac9356-a256-4b09-bfeb-758cbf60c855 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.290520] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3bc9778-1566-400a-b6ec-33a5b9adeb63 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.296015] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Created folder: Project (b4dc2c367ee14732b9f6666688a71ad9) in parent group-v244329. [ 1088.296228] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Creating folder: Instances. Parent ref: group-v244549. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1088.296893] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0be81dba-254c-41ee-a090-0fdf3fc0bf83 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.306910] env[62109]: DEBUG nova.compute.provider_tree [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.309386] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Created folder: Instances in parent group-v244549. [ 1088.309683] env[62109]: DEBUG oslo.service.loopingcall [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1088.310133] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1088.310377] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a7a4485-5442-46e3-adc7-8fc6fa3e95a4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.331777] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1088.331777] env[62109]: value = "task-1117201" [ 1088.331777] env[62109]: _type = "Task" [ 1088.331777] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.339764] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117201, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.465291] env[62109]: DEBUG nova.compute.manager [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Received event network-changed-bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1088.465291] env[62109]: DEBUG nova.compute.manager [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Refreshing instance network info cache due to event network-changed-bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1088.465291] env[62109]: DEBUG oslo_concurrency.lockutils [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] Acquiring lock "refresh_cache-5308edf2-155c-452f-9240-45ff444826aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.465291] env[62109]: DEBUG oslo_concurrency.lockutils [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] Acquired lock "refresh_cache-5308edf2-155c-452f-9240-45ff444826aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.465291] env[62109]: DEBUG nova.network.neutron [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Refreshing network info cache for port bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1088.514526] env[62109]: INFO nova.compute.manager [-] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Took 1.28 seconds to deallocate network for instance. [ 1088.630413] env[62109]: DEBUG oslo_vmware.api [None req-48504c9e-813b-4ab1-be26-7de3c360b5ad tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117198, 'name': SuspendVM_Task, 'duration_secs': 0.952843} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.630708] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-48504c9e-813b-4ab1-be26-7de3c360b5ad tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Suspended the VM {{(pid=62109) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 1088.630910] env[62109]: DEBUG nova.compute.manager [None req-48504c9e-813b-4ab1-be26-7de3c360b5ad tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1088.631723] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1a30d7-2a9b-4600-b3fe-381456f58bed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.811627] env[62109]: DEBUG nova.scheduler.client.report [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1088.844711] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117201, 'name': CreateVM_Task, 'duration_secs': 0.395012} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.844882] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1088.845569] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.845740] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.846096] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1088.846362] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ab7ec28-51d2-4ba8-a13d-c31b9aacf906 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.851800] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Waiting for the task: (returnval){ [ 1088.851800] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fca696-107e-0308-456a-6c6895081873" [ 1088.851800] env[62109]: _type = "Task" [ 1088.851800] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.860299] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fca696-107e-0308-456a-6c6895081873, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.902467] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Updating instance_info_cache with network_info: [{"id": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "address": "fa:16:3e:5c:b7:79", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa43481f3-ca", "ovs_interfaceid": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.950792] env[62109]: DEBUG nova.compute.manager [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Stashing vm_state: active {{(pid=62109) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1089.022127] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.152712] env[62109]: DEBUG nova.network.neutron [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Updated VIF entry in instance network info cache for port bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1089.153093] env[62109]: DEBUG nova.network.neutron [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Updating instance_info_cache with network_info: [{"id": "bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8", "address": "fa:16:3e:f2:0b:8b", "network": {"id": "544b3411-2223-444c-bdb4-6bbdd559bba6", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-508095818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dc2c367ee14732b9f6666688a71ad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdccf7e2-0b", "ovs_interfaceid": "bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.363703] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52fca696-107e-0308-456a-6c6895081873, 'name': SearchDatastore_Task, 'duration_secs': 0.01014} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.364014] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.364254] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1089.364489] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.364642] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.364828] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.365109] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a752f99b-3b92-46c2-8c8a-ff467e84eb07 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.373179] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.373355] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1089.374278] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e16bf4c-44c2-46df-8c1c-564a26e28e05 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.379356] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Waiting for the task: (returnval){ [ 1089.379356] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d6ac70-9b1e-2281-9477-1db539340ee4" [ 1089.379356] env[62109]: _type = "Task" [ 1089.379356] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.386601] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d6ac70-9b1e-2281-9477-1db539340ee4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.405444] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Releasing lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.405796] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Updated the network info_cache for instance {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1089.405986] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.406161] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.406617] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.406822] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.407028] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.407193] env[62109]: DEBUG oslo_service.periodic_task [None 
req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.407325] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1089.407472] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.471158] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.655372] env[62109]: DEBUG oslo_concurrency.lockutils [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] Releasing lock "refresh_cache-5308edf2-155c-452f-9240-45ff444826aa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.655669] env[62109]: DEBUG nova.compute.manager [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Received event network-changed-a43481f3-cacf-4bd2-9e74-4ca60b37da80 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1089.655890] env[62109]: DEBUG nova.compute.manager [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Refreshing instance network info cache due to event network-changed-a43481f3-cacf-4bd2-9e74-4ca60b37da80. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1089.656128] env[62109]: DEBUG oslo_concurrency.lockutils [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] Acquiring lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.656281] env[62109]: DEBUG oslo_concurrency.lockutils [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] Acquired lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.656448] env[62109]: DEBUG nova.network.neutron [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Refreshing network info cache for port a43481f3-cacf-4bd2-9e74-4ca60b37da80 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1089.822142] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.742s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.827752] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.806s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.827987] env[62109]: DEBUG nova.objects.instance [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lazy-loading 'resources' on Instance uuid ad3d92ec-29a9-4f01-8117-47352c244e1e {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.890294] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d6ac70-9b1e-2281-9477-1db539340ee4, 'name': SearchDatastore_Task, 'duration_secs': 0.008564} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.891201] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c08c077-f41e-4405-bf9c-c2fff1b7de5a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.896978] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Waiting for the task: (returnval){ [ 1089.896978] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522248de-f711-1f69-806a-16bcf69150b7" [ 1089.896978] env[62109]: _type = "Task" [ 1089.896978] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.906854] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522248de-f711-1f69-806a-16bcf69150b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.910349] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.076334] env[62109]: INFO nova.compute.manager [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Resuming [ 1090.077072] env[62109]: DEBUG nova.objects.instance [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lazy-loading 'flavor' on Instance uuid 0f7445fa-c48e-4e79-a01a-1f8f70072de4 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1090.347187] env[62109]: DEBUG nova.network.neutron [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Updated VIF entry in instance network info cache for port a43481f3-cacf-4bd2-9e74-4ca60b37da80. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1090.347560] env[62109]: DEBUG nova.network.neutron [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Updating instance_info_cache with network_info: [{"id": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "address": "fa:16:3e:5c:b7:79", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa43481f3-ca", "ovs_interfaceid": "a43481f3-cacf-4bd2-9e74-4ca60b37da80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.409309] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]522248de-f711-1f69-806a-16bcf69150b7, 'name': SearchDatastore_Task, 'duration_secs': 0.010078} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.411734] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.412021] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 5308edf2-155c-452f-9240-45ff444826aa/5308edf2-155c-452f-9240-45ff444826aa.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1090.412482] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-edde32c8-7ffe-4d43-ba20-ef8831819be8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.420220] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Waiting for the task: (returnval){ [ 1090.420220] env[62109]: value = "task-1117202" [ 1090.420220] env[62109]: _type = "Task" [ 1090.420220] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.432205] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117202, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.488497] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d4c0b2-e001-462d-a9b7-94e4d6cfda25 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.497494] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae58c8b-5c98-4b0b-8c8e-c28ff260f762 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.528284] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4672df6d-207a-4615-87c1-32908cc80c16 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.536280] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89352c1-47ef-4f2b-95f1-89ba50341fdc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.549790] env[62109]: DEBUG nova.compute.provider_tree [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.850242] env[62109]: DEBUG oslo_concurrency.lockutils [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] Releasing lock "refresh_cache-751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.850536] env[62109]: DEBUG nova.compute.manager [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Received event network-changed-fad3649e-36ac-4e54-8d8b-9a77b8da03ee {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1090.850752] env[62109]: DEBUG nova.compute.manager [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Refreshing instance network info cache due to event network-changed-fad3649e-36ac-4e54-8d8b-9a77b8da03ee. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1090.850972] env[62109]: DEBUG oslo_concurrency.lockutils [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] Acquiring lock "refresh_cache-c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1090.851142] env[62109]: DEBUG oslo_concurrency.lockutils [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] Acquired lock "refresh_cache-c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.851308] env[62109]: DEBUG nova.network.neutron [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Refreshing network info cache for port fad3649e-36ac-4e54-8d8b-9a77b8da03ee {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1090.931345] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117202, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.438092} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.931637] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 5308edf2-155c-452f-9240-45ff444826aa/5308edf2-155c-452f-9240-45ff444826aa.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1090.931862] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1090.932140] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-80266b66-cc56-4046-9a67-983e460407a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.939910] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Waiting for the task: (returnval){ [ 1090.939910] env[62109]: value = "task-1117203" [ 1090.939910] env[62109]: _type = "Task" [ 1090.939910] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.948746] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117203, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.052797] env[62109]: DEBUG nova.scheduler.client.report [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1091.086056] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.086056] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquired lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.086056] env[62109]: DEBUG nova.network.neutron [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1091.365185] env[62109]: INFO nova.compute.manager [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Swapping old allocation on dict_keys(['574e9717-c25e-453d-8028-45d9e2f95398']) held by migration 96835ba5-87a2-466a-a8f6-769d3f04e0cc for instance [ 1091.392482] env[62109]: DEBUG nova.scheduler.client.report [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Overwriting current allocation {'allocations': {'574e9717-c25e-453d-8028-45d9e2f95398': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 147}}, 'project_id': 'a363548894df47d5981199004e9884de', 'user_id': '5442deec924240babb834fc704d53cd0', 'consumer_generation': 1} on consumer bc75898d-7856-4ecb-9640-ec30538fe90f {{(pid=62109) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1091.451854] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117203, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063935} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.452117] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1091.452885] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d91d3c-d527-440c-8a40-12899e9341be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.476896] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 5308edf2-155c-452f-9240-45ff444826aa/5308edf2-155c-452f-9240-45ff444826aa.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.477188] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc2a8804-2c9a-4a21-a2d3-46dbe9223245 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.493599] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.493781] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.493958] env[62109]: DEBUG nova.network.neutron [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1091.501850] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Waiting for the task: (returnval){ [ 1091.501850] env[62109]: value = "task-1117204" [ 1091.501850] env[62109]: _type = "Task" [ 1091.501850] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.511404] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117204, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.559411] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.730s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.562593] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.091s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.583094] env[62109]: INFO nova.scheduler.client.report [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleted allocations for instance ad3d92ec-29a9-4f01-8117-47352c244e1e [ 1091.617301] env[62109]: DEBUG nova.network.neutron [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updated VIF entry in instance network info cache for port fad3649e-36ac-4e54-8d8b-9a77b8da03ee. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1091.617681] env[62109]: DEBUG nova.network.neutron [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updating instance_info_cache with network_info: [{"id": "fad3649e-36ac-4e54-8d8b-9a77b8da03ee", "address": "fa:16:3e:6b:9d:5c", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfad3649e-36", "ovs_interfaceid": "fad3649e-36ac-4e54-8d8b-9a77b8da03ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.796036] env[62109]: DEBUG nova.network.neutron [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Updating instance_info_cache with network_info: [{"id": 
"bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "address": "fa:16:3e:68:0e:6e", "network": {"id": "feb45222-861d-4499-8c29-03176662f1ef", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-854878036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "093c284d31de414cb583d501864456c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbea956d1-ce", "ovs_interfaceid": "bea956d1-ceb2-4342-bef6-6a37fef7ec4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.014329] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117204, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.070041] env[62109]: INFO nova.compute.claims [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1092.094747] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9500c00c-6af9-435e-ab7b-63a472f4507a tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "ad3d92ec-29a9-4f01-8117-47352c244e1e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.999s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.121600] env[62109]: DEBUG oslo_concurrency.lockutils [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] Releasing lock "refresh_cache-c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.121858] env[62109]: DEBUG nova.compute.manager [req-3aad924e-00b1-4563-befd-a9d9b3d4afa0 req-15372742-a69f-43aa-b3e8-618a1d8c5184 service nova] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Received event network-vif-deleted-13c402cb-03d3-437a-b0c0-ef0bb1f76185 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1092.230826] env[62109]: DEBUG nova.network.neutron [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance_info_cache with network_info: [{"id": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "address": "fa:16:3e:9b:4f:08", "network": {"id": 
"d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b33420f-fd", "ovs_interfaceid": "8b33420f-fd6d-43fc-b4e1-141768c6024b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.298571] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Releasing lock "refresh_cache-0f7445fa-c48e-4e79-a01a-1f8f70072de4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.299594] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698a1993-3288-407f-98ea-ced9ebb16001 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.308141] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Resuming the VM {{(pid=62109) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1184}} [ 1092.308565] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a8700f9-afbe-491e-9527-2f3c357f20f8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.316967] env[62109]: DEBUG oslo_vmware.api [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1092.316967] env[62109]: value = "task-1117205" [ 1092.316967] env[62109]: _type = "Task" [ 1092.316967] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.326202] env[62109]: DEBUG oslo_vmware.api [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117205, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.515752] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117204, 'name': ReconfigVM_Task, 'duration_secs': 0.709397} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.516104] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 5308edf2-155c-452f-9240-45ff444826aa/5308edf2-155c-452f-9240-45ff444826aa.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1092.516835] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44793098-0f32-4cb9-b410-adaa55e4b6e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.524392] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Waiting for the task: (returnval){ [ 1092.524392] env[62109]: value = "task-1117206" [ 1092.524392] env[62109]: _type = "Task" [ 1092.524392] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.533422] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117206, 'name': Rename_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.576387] env[62109]: INFO nova.compute.resource_tracker [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updating resource usage from migration b2fc88b7-3fea-437f-a5a5-6588e5b8f93f [ 1092.721517] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167dfdf0-4851-4f32-941d-266db3e2a7dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.729450] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec14d3b-6ced-49dd-8d52-fefc516fc671 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.733537] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "refresh_cache-bc75898d-7856-4ecb-9640-ec30538fe90f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.734434] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216241ff-77f6-426d-b845-2601e8d61ee8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.765977] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3f726d-1204-43df-a60f-da7147e4d10f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.770025] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9464acb-9580-4dcb-a70f-358701b409b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.778578] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5f10e8-32ae-4bb3-b884-ec9f93f07723 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.799049] env[62109]: DEBUG nova.compute.provider_tree [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.828464] env[62109]: DEBUG oslo_vmware.api [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117205, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.038682] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117206, 'name': Rename_Task, 'duration_secs': 0.150797} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.039060] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1093.039377] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce35832f-3aa0-44eb-8b3d-2646379e178b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.049810] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Waiting for the task: (returnval){ [ 1093.049810] env[62109]: value = "task-1117207" [ 1093.049810] env[62109]: _type = "Task" [ 1093.049810] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.060509] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117207, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.303422] env[62109]: DEBUG nova.scheduler.client.report [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1093.331728] env[62109]: DEBUG oslo_vmware.api [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117205, 'name': PowerOnVM_Task, 'duration_secs': 0.843201} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.332343] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Resumed the VM {{(pid=62109) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1189}} [ 1093.332494] env[62109]: DEBUG nova.compute.manager [None req-ce2634b3-8810-4910-a3d8-5ea584829f90 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1093.336620] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0581a404-7c75-4125-bc5e-fa9a64498bfd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.561132] env[62109]: DEBUG oslo_vmware.api [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117207, 'name': PowerOnVM_Task, 'duration_secs': 0.470531} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.561507] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1093.561706] env[62109]: INFO nova.compute.manager [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Took 7.49 seconds to spawn the instance on the hypervisor. 
[ 1093.561937] env[62109]: DEBUG nova.compute.manager [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1093.562773] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d430d0-f0fa-4581-b442-24d21fb3af2a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.808886] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.247s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.809186] env[62109]: INFO nova.compute.manager [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Migrating [ 1093.817339] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.907s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.817595] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.817841] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1093.822812] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab16bfb6-a27f-46f3-8c2c-6881688b7723 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.842508] env[62109]: DEBUG nova.compute.manager [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Stashing vm_state: active {{(pid=62109) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1093.848052] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4599bc2a-82f5-48aa-89b5-22e29a1f1c7a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.877030] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Powering off the VM {{(pid=62109) 
power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1093.878181] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f8d55c6-49c6-44f6-a595-86d14adad585 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.881998] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1447c7c-5795-4d65-bd1e-110337befac9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.896373] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc20a3ba-56c2-4f89-a732-c04ac10fa649 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.899748] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1093.899748] env[62109]: value = "task-1117208" [ 1093.899748] env[62109]: _type = "Task" [ 1093.899748] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.927824] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179668MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1093.928136] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.928413] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.936242] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117208, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.079588] env[62109]: INFO nova.compute.manager [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Took 12.29 seconds to build instance. 
[ 1094.333356] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "refresh_cache-c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1094.333519] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquired lock "refresh_cache-c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.333688] env[62109]: DEBUG nova.network.neutron [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1094.372673] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.410307] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117208, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.581859] env[62109]: DEBUG oslo_concurrency.lockutils [None req-463c3efc-c368-4951-b59d-90b9afc7e343 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Lock "5308edf2-155c-452f-9240-45ff444826aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.801s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.910389] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117208, 'name': PowerOffVM_Task, 'duration_secs': 0.786383} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.910718] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1094.911399] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1094.911619] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1094.911779] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1094.911966] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1094.912133] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1094.912286] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1094.912491] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1094.912655] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 
tempest-ServerActionsTestOtherB-1141754876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1094.912823] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1094.912990] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1094.913183] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1094.918100] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a709493-4733-4e13-8751-c7b93def5cc5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.935957] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1094.935957] env[62109]: value = "task-1117209" [ 1094.935957] env[62109]: _type = "Task" [ 1094.935957] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.939459] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Applying migration context for instance c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea as it has an incoming, in-progress migration b2fc88b7-3fea-437f-a5a5-6588e5b8f93f. Migration status is pre-migrating {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1094.940692] env[62109]: INFO nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating resource usage from migration a0f33392-7def-460f-9d25-e2e9a51f7580 [ 1094.940833] env[62109]: INFO nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updating resource usage from migration b2fc88b7-3fea-437f-a5a5-6588e5b8f93f [ 1094.957567] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117209, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.968055] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.968055] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.968055] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.968055] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance bc75898d-7856-4ecb-9640-ec30538fe90f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.968055] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 0f7445fa-c48e-4e79-a01a-1f8f70072de4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.968055] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.968055] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 5308edf2-155c-452f-9240-45ff444826aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.968055] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Migration b2fc88b7-3fea-437f-a5a5-6588e5b8f93f is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1094.968055] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.968055] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Migration a0f33392-7def-460f-9d25-e2e9a51f7580 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1094.968055] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.968055] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1094.968055] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1095.078963] env[62109]: DEBUG nova.network.neutron [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updating instance_info_cache with network_info: [{"id": "fad3649e-36ac-4e54-8d8b-9a77b8da03ee", "address": "fa:16:3e:6b:9d:5c", "network": {"id": "02d3202b-13ad-4f85-8a60-81e131867022", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-587695573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac84df552ee74053a00b8204aa781f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfad3649e-36", "ovs_interfaceid": "fad3649e-36ac-4e54-8d8b-9a77b8da03ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.110451] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274bef21-d99a-4923-98cb-ebe0fac4712e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.119216] env[62109]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d834235-5a20-447c-a5e5-dfa5c5a61675 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.150476] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4431c082-8d0f-4563-8c38-1502af556b4c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.158442] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5ba306-34af-4b6d-82a8-51b56c96e87c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.172034] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.173840] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Acquiring lock "5308edf2-155c-452f-9240-45ff444826aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.173990] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Lock "5308edf2-155c-452f-9240-45ff444826aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.174202] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Acquiring lock "5308edf2-155c-452f-9240-45ff444826aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.174390] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Lock "5308edf2-155c-452f-9240-45ff444826aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.174560] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Lock "5308edf2-155c-452f-9240-45ff444826aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.176392] env[62109]: INFO nova.compute.manager [None 
req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Terminating instance [ 1095.177987] env[62109]: DEBUG nova.compute.manager [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1095.178199] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1095.178960] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34f9a0b-1b0a-4161-97c4-09bfa15130ca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.187386] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1095.188028] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ae21676-e40b-4a5e-8c10-3d76ead379ef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.194299] env[62109]: DEBUG oslo_vmware.api [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Waiting for the task: (returnval){ [ 1095.194299] env[62109]: value = "task-1117210" [ 1095.194299] env[62109]: _type = "Task" [ 1095.194299] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.203204] env[62109]: DEBUG oslo_vmware.api [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117210, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.461388] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117209, 'name': ReconfigVM_Task, 'duration_secs': 0.186283} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.462608] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05391b9c-6393-490d-ba9c-bca89dd58d4c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.494949] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1095.495287] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1095.495495] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1095.495749] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1095.495952] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1095.496173] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1095.496458] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1095.496687] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1095.496916] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1095.497156] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1095.497398] env[62109]: DEBUG nova.virt.hardware [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1095.498454] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f49d428-374d-48e8-94b4-441b0b472c81 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.505794] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1095.505794] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521f487d-be3d-f1ca-d9a0-165a84a46b52" [ 1095.505794] env[62109]: _type = "Task" [ 1095.505794] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.517628] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]521f487d-be3d-f1ca-d9a0-165a84a46b52, 'name': SearchDatastore_Task, 'duration_secs': 0.008693} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.525602] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfiguring VM instance instance-0000005c to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1095.525939] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6c778c2-33e1-4b22-80eb-4a11899bb9c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.551699] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1095.551699] env[62109]: value = "task-1117211" [ 1095.551699] env[62109]: _type = "Task" [ 1095.551699] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.563135] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117211, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.583093] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Releasing lock "refresh_cache-c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.675483] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1095.705255] env[62109]: DEBUG oslo_vmware.api [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117210, 'name': PowerOffVM_Task, 'duration_secs': 0.180218} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.705595] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1095.705715] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1095.705969] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7cf909a-2a20-4b73-b889-daffbec1bbfb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.769073] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1095.769346] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1095.769541] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Deleting the datastore file [datastore2] 5308edf2-155c-452f-9240-45ff444826aa {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1095.769814] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-947b06f4-efd0-4799-9dc6-b64787c88043 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.776597] env[62109]: DEBUG oslo_vmware.api [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Waiting for the task: (returnval){ [ 1095.776597] env[62109]: value = "task-1117213" [ 1095.776597] env[62109]: _type = "Task" [ 1095.776597] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.785933] env[62109]: DEBUG oslo_vmware.api [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117213, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.063376] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117211, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.180445] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1096.180700] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.252s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.180976] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.808s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.286701] env[62109]: DEBUG oslo_vmware.api [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Task: {'id': task-1117213, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155452} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.286974] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1096.287203] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1096.287395] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1096.287585] env[62109]: INFO nova.compute.manager [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1096.287820] env[62109]: DEBUG oslo.service.loopingcall [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1096.288014] env[62109]: DEBUG nova.compute.manager [-] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1096.288118] env[62109]: DEBUG nova.network.neutron [-] [instance: 5308edf2-155c-452f-9240-45ff444826aa] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1096.537669] env[62109]: DEBUG nova.compute.manager [req-9527d034-c636-42e4-9c4f-0fff7cc2fcba req-79c7325c-94a7-433f-a0f8-a532ca2ed3e1 service nova] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Received event network-vif-deleted-bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1096.538501] env[62109]: INFO nova.compute.manager [req-9527d034-c636-42e4-9c4f-0fff7cc2fcba req-79c7325c-94a7-433f-a0f8-a532ca2ed3e1 service nova] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Neutron deleted interface bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8; detaching it from the instance and deleting it from the info cache [ 1096.539177] env[62109]: DEBUG nova.network.neutron [req-9527d034-c636-42e4-9c4f-0fff7cc2fcba req-79c7325c-94a7-433f-a0f8-a532ca2ed3e1 service nova] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.562925] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117211, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.686349] env[62109]: INFO nova.compute.claims [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1097.012045] env[62109]: DEBUG nova.network.neutron [-] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.042686] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5b16ee8-a9f7-43fd-a0cc-4fd3e16d2a33 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.052283] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c38cf47-9991-453f-9ed1-e8355e72dcab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.071996] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117211, 'name': ReconfigVM_Task, 'duration_secs': 1.188867} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.072304] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfigured VM instance instance-0000005c to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1097.073103] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b25ff6-9c7f-40cd-9902-aa63c02127c1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.098718] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] bc75898d-7856-4ecb-9640-ec30538fe90f/bc75898d-7856-4ecb-9640-ec30538fe90f.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1097.108658] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3f48464-72cc-49c8-b0c9-3267d5b69e3f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.122092] env[62109]: DEBUG nova.compute.manager [req-9527d034-c636-42e4-9c4f-0fff7cc2fcba req-79c7325c-94a7-433f-a0f8-a532ca2ed3e1 service nova] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Detach interface failed, port_id=bdccf7e2-0bd5-4d32-96fc-43ba2bdd31a8, reason: Instance 5308edf2-155c-452f-9240-45ff444826aa could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1097.123746] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9601d04c-94f9-45e2-9140-6e3d49df7bcd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.144924] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updating instance 'c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea' progress to 0 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1097.148732] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1097.148732] env[62109]: value = "task-1117214" [ 1097.148732] env[62109]: _type = "Task" [ 1097.148732] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.158889] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117214, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.193155] env[62109]: INFO nova.compute.resource_tracker [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating resource usage from migration a0f33392-7def-460f-9d25-e2e9a51f7580 [ 1097.442655] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad5a800-5129-4001-94c7-2e65876e4b37 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.450811] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5cde3e-f734-4266-91d9-0f8a664467b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.483106] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f1bfeb-1dc1-4a19-93b3-7dd80cbeb97e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.491230] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11aad9e6-d69c-4a82-9c47-ed86178d0414 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.505195] env[62109]: DEBUG nova.compute.provider_tree [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1097.514635] env[62109]: INFO nova.compute.manager [-] [instance: 5308edf2-155c-452f-9240-45ff444826aa] Took 1.23 seconds 
to deallocate network for instance. [ 1097.651717] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1097.652054] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46ebf8d1-52a1-4181-8043-414386e574d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.663577] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117214, 'name': ReconfigVM_Task, 'duration_secs': 0.341902} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.664852] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfigured VM instance instance-0000005c to attach disk [datastore1] bc75898d-7856-4ecb-9640-ec30538fe90f/bc75898d-7856-4ecb-9640-ec30538fe90f.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1097.665281] env[62109]: DEBUG oslo_vmware.api [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1097.665281] env[62109]: value = "task-1117215" [ 1097.665281] env[62109]: _type = "Task" [ 1097.665281] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.666031] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4c0f2c-a2ca-4a4c-94d8-ff329c2dff57 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.676175] env[62109]: DEBUG oslo_vmware.api [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117215, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.694076] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd64f327-e238-4656-b600-a48ee68d0f91 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.715355] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d47279-f700-4c5b-a2a4-7f3d778b3439 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.736995] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c09d5d0-6d3c-4526-8480-a5737a690baf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.745169] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1097.745427] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d0bbccb-580e-4176-a716-e34a9c9f28a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.752485] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1097.752485] env[62109]: value = "task-1117216" [ 1097.752485] env[62109]: _type = "Task" [ 1097.752485] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.761238] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117216, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.008884] env[62109]: DEBUG nova.scheduler.client.report [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1098.020815] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.178760] env[62109]: DEBUG oslo_vmware.api [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117215, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.263461] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117216, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.514057] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.333s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.514347] env[62109]: INFO nova.compute.manager [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Migrating [ 1098.521525] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.501s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.521806] env[62109]: DEBUG nova.objects.instance [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Lazy-loading 'resources' on Instance uuid 5308edf2-155c-452f-9240-45ff444826aa {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1098.680674] env[62109]: DEBUG oslo_vmware.api [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117215, 'name': PowerOffVM_Task, 'duration_secs': 0.948001} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.681023] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1098.681241] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updating instance 'c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea' progress to 17 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1098.767473] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117216, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.924334] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.924647] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.924831] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.925028] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.925209] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.927833] env[62109]: INFO nova.compute.manager [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Terminating instance [ 1098.930199] env[62109]: DEBUG nova.compute.manager [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1098.930503] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1098.931484] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903d4e73-dbb8-4dc5-9d6a-c3055fd47370 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.941410] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1098.941410] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-176bc24c-3544-4c7f-8da3-340b16197d1c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.949116] env[62109]: DEBUG oslo_vmware.api [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1098.949116] env[62109]: value = "task-1117217" [ 1098.949116] env[62109]: _type = "Task" [ 1098.949116] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.960860] env[62109]: DEBUG oslo_vmware.api [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117217, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.034133] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.034352] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.034534] env[62109]: DEBUG nova.network.neutron [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1099.188161] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1099.188428] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1099.188580] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1099.188798] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1099.189059] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1099.189278] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1099.189527] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1099.189649] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1099.189863] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1099.190048] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1099.190240] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1099.196242] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f27b5085-15c6-4991-9ddc-a55ef0475e66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.207127] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d1e292-2016-4596-b462-361a339c876f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.216325] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4cb0d6c-5432-42c2-80ec-fc4c63bb53b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.219491] env[62109]: DEBUG oslo_vmware.api [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1099.219491] env[62109]: value = "task-1117218" [ 1099.219491] env[62109]: _type = "Task" [ 1099.219491] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.248966] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df129ef5-da74-4688-ba0b-c1a3fdb7da5f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.255391] env[62109]: DEBUG oslo_vmware.api [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.264236] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-296cc082-cf06-4d6c-8d87-3dd8f0f17102 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.271071] env[62109]: DEBUG oslo_vmware.api [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117216, 'name': PowerOnVM_Task, 'duration_secs': 1.322622} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.271752] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1099.283236] env[62109]: DEBUG nova.compute.provider_tree [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1099.460627] env[62109]: DEBUG oslo_vmware.api [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117217, 'name': PowerOffVM_Task, 'duration_secs': 0.223135} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.460627] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1099.460806] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1099.460937] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6fcfabcf-c6dd-415d-9806-ff94dfca20be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.640398] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1099.640718] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1099.640911] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Deleting the datastore file [datastore1] 0f7445fa-c48e-4e79-a01a-1f8f70072de4 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1099.641200] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03ad9146-67b4-40f4-b062-27db3d587c4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.646089] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1099.649460] env[62109]: DEBUG oslo_vmware.api [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for the task: (returnval){ [ 1099.649460] env[62109]: value = "task-1117220" [ 1099.649460] env[62109]: _type = "Task" [ 1099.649460] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.657900] env[62109]: DEBUG oslo_vmware.api [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117220, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.732556] env[62109]: DEBUG oslo_vmware.api [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117218, 'name': ReconfigVM_Task, 'duration_secs': 0.135173} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.732924] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updating instance 'c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea' progress to 33 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1099.761230] env[62109]: DEBUG nova.network.neutron [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance_info_cache with network_info: [{"id": "14e9f4a1-8980-4de2-88f7-dd0162687351", "address": "fa:16:3e:3f:3e:8b", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14e9f4a1-89", "ovs_interfaceid": "14e9f4a1-8980-4de2-88f7-dd0162687351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.790221] env[62109]: DEBUG nova.scheduler.client.report [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1100.156655] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1100.157351] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1100.157547] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1100.157704] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1100.157845] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1100.158013] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1100.158146] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Cleaning up deleted instances {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1100.162879] env[62109]: DEBUG oslo_vmware.api [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Task: {'id': task-1117220, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147509} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.163455] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1100.163661] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1100.163842] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1100.164030] env[62109]: INFO nova.compute.manager [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Took 1.23 seconds to destroy the instance on the hypervisor. 
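[editor's note] The CPU-topology entries in this log ("Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies ... [VirtCPUTopology(cores=1,sockets=1,threads=1)]", for the m1.micro block above and the m1.nano block that follows) reflect Nova enumerating sockets/cores/threads combinations for the flavor's vCPU count under the per-dimension maxima. The sketch below is a simplified illustration of that enumeration, not the actual nova.virt.hardware code; for a 1-vCPU flavor with the default 65536 limits it reproduces the single 1:1:1 result seen in the log.

from collections import namedtuple

Topology = namedtuple('Topology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Yield (sockets, cores, threads) combinations whose product equals vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield Topology(sockets, cores, threads)

# 1 vCPU, effectively unbounded limits -> exactly one candidate topology.
print(list(possible_topologies(1, 65536, 65536, 65536)))
# [Topology(sockets=1, cores=1, threads=1)]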
[ 1100.164282] env[62109]: DEBUG oslo.service.loopingcall [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1100.164480] env[62109]: DEBUG nova.compute.manager [-] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1100.164578] env[62109]: DEBUG nova.network.neutron [-] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1100.239422] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1100.239691] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1100.239854] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1100.240052] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1100.240212] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1100.241171] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1100.241486] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1100.241694] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1100.241873] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1100.242554] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1100.242554] env[62109]: DEBUG nova.virt.hardware [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1100.264049] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.300105] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.779s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.318874] env[62109]: INFO nova.scheduler.client.report [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Deleted allocations for instance 5308edf2-155c-452f-9240-45ff444826aa [ 1100.337031] env[62109]: INFO nova.compute.manager [None req-3ab43d53-1b5d-4f0c-a4e3-50e07ba8d327 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance to original state: 'active' [ 1100.450692] env[62109]: DEBUG nova.compute.manager [req-b754278b-4829-4a29-9770-272a42f9d0c5 req-73fef91b-563f-4cf2-ac4c-902095909544 service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Received event network-vif-deleted-bea956d1-ceb2-4342-bef6-6a37fef7ec4f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1100.450692] env[62109]: INFO nova.compute.manager [req-b754278b-4829-4a29-9770-272a42f9d0c5 req-73fef91b-563f-4cf2-ac4c-902095909544 service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Neutron 
deleted interface bea956d1-ceb2-4342-bef6-6a37fef7ec4f; detaching it from the instance and deleting it from the info cache [ 1100.450692] env[62109]: DEBUG nova.network.neutron [req-b754278b-4829-4a29-9770-272a42f9d0c5 req-73fef91b-563f-4cf2-ac4c-902095909544 service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.677076] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] There are 54 instances to clean {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1100.677076] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: ad3d92ec-29a9-4f01-8117-47352c244e1e] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1100.751263] env[62109]: ERROR nova.compute.manager [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Setting instance vm_state to ERROR: AttributeError: 'NoneType' object has no attribute 'key' [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Traceback (most recent call last): [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] File "/opt/stack/nova/nova/compute/manager.py", line 10865, in _error_out_instance_on_exception [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] yield [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] File "/opt/stack/nova/nova/compute/manager.py", line 6105, in _resize_instance [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] disk_info = self.driver.migrate_disk_and_power_off( [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 264, in migrate_disk_and_power_off [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] return self._vmops.migrate_disk_and_power_off(context, instance, [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1467, in migrate_disk_and_power_off [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] self._resize_disk(instance, vm_ref, vmdk, flavor) [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1398, in _resize_disk [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] self._volumeops.detach_disk_from_vm(vm_ref, instance, vmdk.device) [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 121, in detach_disk_from_vm [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] disk_key = device.key [ 1100.751263] env[62109]: ERROR nova.compute.manager 
[instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] AttributeError: 'NoneType' object has no attribute 'key' [ 1100.751263] env[62109]: ERROR nova.compute.manager [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] [ 1100.828869] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f11a09ea-0711-4220-8da8-e3139a9c6ef1 tempest-ServerAddressesNegativeTestJSON-1340596340 tempest-ServerAddressesNegativeTestJSON-1340596340-project-member] Lock "5308edf2-155c-452f-9240-45ff444826aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.655s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.928039] env[62109]: DEBUG nova.network.neutron [-] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.953104] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51d58b40-d410-4b3b-822d-ff0ba8ef54ee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.966262] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8ee9b9-5f00-4cd6-92f7-3e05e73afe1f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.996050] env[62109]: DEBUG nova.compute.manager [req-b754278b-4829-4a29-9770-272a42f9d0c5 req-73fef91b-563f-4cf2-ac4c-902095909544 service nova] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Detach interface failed, port_id=bea956d1-ceb2-4342-bef6-6a37fef7ec4f, reason: Instance 0f7445fa-c48e-4e79-a01a-1f8f70072de4 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1101.180307] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 1f346681-b63f-4587-808c-bde4f0ba5831] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1101.271443] env[62109]: INFO nova.compute.manager [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Swapping old allocation on dict_keys(['574e9717-c25e-453d-8028-45d9e2f95398']) held by migration b2fc88b7-3fea-437f-a5a5-6588e5b8f93f for instance [ 1101.301490] env[62109]: DEBUG nova.scheduler.client.report [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Overwriting current allocation {'allocations': {'574e9717-c25e-453d-8028-45d9e2f95398': {'resources': {'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 150}}, 'project_id': 'ac84df552ee74053a00b8204aa781f3b', 'user_id': '67a1245dbb50458ebda1a0a350def68b', 'consumer_generation': 1} on consumer c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea {{(pid=62109) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1101.430628] env[62109]: INFO nova.compute.manager [-] [instance: 0f7445fa-c48e-4e79-a01a-1f8f70072de4] Took 1.27 seconds to deallocate network for instance. 
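[editor's note] The resize failure traced above ends in "AttributeError: 'NoneType' object has no attribute 'key'" because detach_disk_from_vm was handed a device of None (the VMDK device lookup found nothing) and immediately read device.key. The sketch below only illustrates that failure mode and a defensive guard; the DiskNotFound name and the function body are illustrative, not the actual nova.virt.vmwareapi code or a proposed patch.

class DiskNotFound(Exception):
    pass

def detach_disk_from_vm(vm_ref, instance_uuid, device):
    if device is None:
        # Without this check, device.key raises
        # AttributeError: 'NoneType' object has no attribute 'key'
        raise DiskNotFound("no VMDK device found for instance %s" % instance_uuid)
    disk_key = device.key
    # ... build the reconfig spec and invoke ReconfigVM_Task here ...
    return disk_key

# Guarded path vs. the log's failure (device lookup returned None):
try:
    detach_disk_from_vm('vm-123', 'c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea', None)
except DiskNotFound as exc:
    print(exc)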
[ 1101.685407] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 42e26a9c-fdba-4d5c-86b1-146fce2e6b23] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1101.779920] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1468a03-d8cf-4a10-8e1d-7b39d4b3907e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.807446] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance '93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af' progress to 0 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1101.940020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1101.940020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.940020] env[62109]: DEBUG nova.objects.instance [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lazy-loading 'resources' on Instance uuid 0f7445fa-c48e-4e79-a01a-1f8f70072de4 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.981010] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1101.981292] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.981605] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1101.981850] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.982052] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.984497] env[62109]: INFO nova.compute.manager [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Terminating instance [ 1101.986796] env[62109]: DEBUG nova.compute.manager [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1101.987022] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1101.987315] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb8bea71-a526-4392-82e6-b9514d7fcfe8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.997666] env[62109]: DEBUG oslo_vmware.api [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1101.997666] env[62109]: value = "task-1117221" [ 1101.997666] env[62109]: _type = "Task" [ 1101.997666] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.012998] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1102.013284] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Volume detach. 
Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1102.013486] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244530', 'volume_id': '35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'name': 'volume-35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea', 'attached_at': '', 'detached_at': '', 'volume_id': '35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'serial': '35d2eab5-8592-4a6e-b3bb-80c16e77808d'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1102.014750] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e922627-50dc-4e46-ab35-1525c9e9dfad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.035232] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab2e0ea-0557-41c4-a081-bc33a1d36ab9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.043359] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9303ffa4-7dcb-4aaf-9d67-d7ff7b6886d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.062058] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-777c4868-9a9a-46f1-877d-2714d33f3d0d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.080554] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] The volume has not been displaced from its original location: [datastore1] volume-35d2eab5-8592-4a6e-b3bb-80c16e77808d/volume-35d2eab5-8592-4a6e-b3bb-80c16e77808d.vmdk. No consolidation needed. 
{{(pid=62109) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1102.085919] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Reconfiguring VM instance instance-00000067 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1102.086280] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f83af53-1036-4a0d-a47d-6da8a180413f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.105762] env[62109]: DEBUG oslo_vmware.api [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1102.105762] env[62109]: value = "task-1117222" [ 1102.105762] env[62109]: _type = "Task" [ 1102.105762] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.115653] env[62109]: DEBUG oslo_vmware.api [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117222, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.187365] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: d9a02690-0e85-4a61-a794-e9a852ce77d7] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1102.314604] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1102.314931] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2873c757-351f-4a22-bb90-b71200f0c610 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.326916] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1102.326916] env[62109]: value = "task-1117223" [ 1102.326916] env[62109]: _type = "Task" [ 1102.326916] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.336756] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117223, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.495932] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "bc75898d-7856-4ecb-9640-ec30538fe90f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.496306] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.496579] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "bc75898d-7856-4ecb-9640-ec30538fe90f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.496812] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.497017] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.504022] env[62109]: INFO nova.compute.manager [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Terminating instance [ 1102.508872] env[62109]: DEBUG nova.compute.manager [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1102.509324] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1102.510223] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f89da62a-0f07-4caf-82c8-07c647657e9f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.522473] env[62109]: DEBUG oslo_vmware.api [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1102.522473] env[62109]: value = "task-1117224" [ 1102.522473] env[62109]: _type = "Task" [ 1102.522473] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.536099] env[62109]: DEBUG oslo_vmware.api [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117224, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.597100] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fb358e-521d-4d15-bcf3-5c0d98bd8d06 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.604576] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0f22fc-401f-4595-a10e-dbee303c48a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.617637] env[62109]: DEBUG oslo_vmware.api [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117222, 'name': ReconfigVM_Task, 'duration_secs': 0.364257} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.646179] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Reconfigured VM instance instance-00000067 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1102.653572] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9abc4f2b-ca3f-4ff4-974a-e43668147f91 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.662503] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91696ac-b69b-4ebd-a0bf-03eabc3cda11 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.673427] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f58efa-b593-46e6-ab8c-49154c0b7758 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.677538] env[62109]: DEBUG oslo_vmware.api [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1102.677538] env[62109]: value = "task-1117225" [ 1102.677538] env[62109]: _type = "Task" [ 1102.677538] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.689556] env[62109]: DEBUG nova.compute.provider_tree [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.691457] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 12288104-483b-4bb4-9e33-05bf5d7be3a8] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1102.697628] env[62109]: DEBUG oslo_vmware.api [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117225, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.838330] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117223, 'name': PowerOffVM_Task, 'duration_secs': 0.225329} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.838654] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1102.838839] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance '93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af' progress to 17 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1102.872199] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.035290] env[62109]: DEBUG oslo_vmware.api [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117224, 'name': PowerOffVM_Task, 'duration_secs': 0.229581} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.035567] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1103.035773] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Volume detach. 
Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1103.035992] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244529', 'volume_id': '321c4adf-adc3-49cc-a7cc-31ff829b0f66', 'name': 'volume-321c4adf-adc3-49cc-a7cc-31ff829b0f66', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'bc75898d-7856-4ecb-9640-ec30538fe90f', 'attached_at': '2024-10-03T08:01:22.000000', 'detached_at': '', 'volume_id': '321c4adf-adc3-49cc-a7cc-31ff829b0f66', 'serial': '321c4adf-adc3-49cc-a7cc-31ff829b0f66'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1103.036822] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d3aa97-394a-41d4-8d39-be7dbd09eefb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.059134] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78cf0b9-8f02-412b-a060-b238a4f0103b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.066449] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134442bb-1729-4b4f-bb97-2138ca546feb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.088713] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e438c6a-573c-4c02-a244-14b26e6e3ecc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.103530] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] The volume has not been displaced from its original location: [datastore1] volume-321c4adf-adc3-49cc-a7cc-31ff829b0f66/volume-321c4adf-adc3-49cc-a7cc-31ff829b0f66.vmdk. No consolidation needed. 
{{(pid=62109) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1103.108634] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfiguring VM instance instance-0000005c to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1103.108919] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5ca438d-6b2f-4757-afc6-03e341b61d82 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.128524] env[62109]: DEBUG oslo_vmware.api [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1103.128524] env[62109]: value = "task-1117226" [ 1103.128524] env[62109]: _type = "Task" [ 1103.128524] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.136471] env[62109]: DEBUG oslo_vmware.api [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117226, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.187411] env[62109]: DEBUG oslo_vmware.api [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117225, 'name': ReconfigVM_Task, 'duration_secs': 0.123985} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.187757] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244530', 'volume_id': '35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'name': 'volume-35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea', 'attached_at': '', 'detached_at': '', 'volume_id': '35d2eab5-8592-4a6e-b3bb-80c16e77808d', 'serial': '35d2eab5-8592-4a6e-b3bb-80c16e77808d'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1103.188066] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1103.189029] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5d806a-6c3a-403d-8551-0a50df2d9677 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.196521] env[62109]: DEBUG nova.scheduler.client.report [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1103.199648] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1103.200127] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: f5b81761-6db9-4260-8876-435bac74b027] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1103.201848] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c5ab8a9-733a-4670-9f6f-5815570ca2f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.282663] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Unregistered the VM {{(pid=62109) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1103.282950] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1103.283161] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleting the datastore file [datastore1] c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1103.283437] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c18a77eb-a484-4f09-9068-2b1dcec739da {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.290490] env[62109]: DEBUG oslo_vmware.api [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1103.290490] env[62109]: value = "task-1117228" [ 1103.290490] env[62109]: _type = "Task" [ 1103.290490] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.299363] env[62109]: DEBUG oslo_vmware.api [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117228, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.345886] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1103.346156] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1103.346325] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1103.346634] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1103.346674] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1103.346803] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1103.347022] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1103.347193] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1103.347428] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Got 1 possible topologies 
{{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1103.347637] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1103.347816] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1103.353198] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f5bc507d-ebe2-4f47-891d-6cc421d43ab2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.369921] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.370292] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1103.370292] env[62109]: value = "task-1117229" [ 1103.370292] env[62109]: _type = "Task" [ 1103.370292] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.386278] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117229, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.639505] env[62109]: DEBUG oslo_vmware.api [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117226, 'name': ReconfigVM_Task, 'duration_secs': 0.221451} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.639809] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Reconfigured VM instance instance-0000005c to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1103.644522] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b17dffee-cf55-4e81-811e-21f8e9451e53 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.660944] env[62109]: DEBUG oslo_vmware.api [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1103.660944] env[62109]: value = "task-1117230" [ 1103.660944] env[62109]: _type = "Task" [ 1103.660944] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.669820] env[62109]: DEBUG oslo_vmware.api [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117230, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.704673] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 2fddcd6c-241e-4591-acec-12487909355c] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1103.706974] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.769s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.710336] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.838s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.801404] env[62109]: DEBUG oslo_vmware.api [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131002} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.801640] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1103.801839] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1103.802028] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1103.802215] env[62109]: INFO nova.compute.manager [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Took 1.82 seconds to destroy the instance on the hypervisor. [ 1103.802463] env[62109]: DEBUG oslo.service.loopingcall [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1103.802660] env[62109]: DEBUG nova.compute.manager [-] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1103.802756] env[62109]: DEBUG nova.network.neutron [-] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1103.830080] env[62109]: INFO nova.scheduler.client.report [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Deleted allocations for instance 0f7445fa-c48e-4e79-a01a-1f8f70072de4 [ 1103.886429] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117229, 'name': ReconfigVM_Task, 'duration_secs': 0.244774} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.886543] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance '93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af' progress to 33 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1103.961815] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f32fe50-ba38-41c8-ad27-270ba1fa7203 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.971461] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074ee2c1-a55f-4b17-bd2f-34c9ec07f65f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.001546] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d01e7c-ef87-4623-94da-d79e9f5a9ba4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.010055] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51b61cf-2e3b-459c-b07b-25eeac8bf7f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.023801] env[62109]: DEBUG nova.compute.provider_tree [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.170590] env[62109]: DEBUG oslo_vmware.api [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117230, 'name': ReconfigVM_Task, 'duration_secs': 0.196972} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.170975] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244529', 'volume_id': '321c4adf-adc3-49cc-a7cc-31ff829b0f66', 'name': 'volume-321c4adf-adc3-49cc-a7cc-31ff829b0f66', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'bc75898d-7856-4ecb-9640-ec30538fe90f', 'attached_at': '2024-10-03T08:01:22.000000', 'detached_at': '', 'volume_id': '321c4adf-adc3-49cc-a7cc-31ff829b0f66', 'serial': '321c4adf-adc3-49cc-a7cc-31ff829b0f66'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1104.171842] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1104.172624] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28b8184-da61-41cc-a7bf-173272e5d142 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.179677] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1104.180508] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d61ecb0-0a73-4554-b904-f374f02b8b5b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.211018] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: f72ca981-1bba-44d9-854f-7677f1a0c764] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1104.242265] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1104.242265] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1104.242484] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleting the datastore file [datastore1] 
bc75898d-7856-4ecb-9640-ec30538fe90f {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1104.242679] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a790b944-6d72-4c37-a92a-5219e7bed45c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.252283] env[62109]: DEBUG oslo_vmware.api [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1104.252283] env[62109]: value = "task-1117232" [ 1104.252283] env[62109]: _type = "Task" [ 1104.252283] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.260781] env[62109]: DEBUG oslo_vmware.api [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117232, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.341061] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ec6fd8a6-e7d8-4a5f-b28e-12c43d443ad7 tempest-ServersNegativeTestJSON-943518374 tempest-ServersNegativeTestJSON-943518374-project-member] Lock "0f7445fa-c48e-4e79-a01a-1f8f70072de4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.416s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.393392] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1104.393758] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1104.393977] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1104.394938] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1104.394938] env[62109]: DEBUG 
nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1104.394938] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1104.394938] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1104.395210] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1104.395210] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1104.395566] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1104.395771] env[62109]: DEBUG nova.virt.hardware [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1104.401813] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Reconfiguring VM instance instance-00000050 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1104.402132] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28884b16-07fd-44d4-9290-c668bbf2dffc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.421771] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1104.421771] env[62109]: value = "task-1117233" [ 1104.421771] env[62109]: _type = "Task" [ 1104.421771] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.431253] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117233, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.527037] env[62109]: DEBUG nova.scheduler.client.report [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1104.712617] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.712905] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.713143] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.713338] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.713514] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1104.715347] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: b5410f60-c5fb-4325-8d42-8745c310a6ca] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1104.721266] env[62109]: INFO nova.compute.manager [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Terminating instance [ 1104.723138] env[62109]: DEBUG nova.compute.manager [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1104.723338] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1104.724201] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5547396f-2289-46cb-a1f2-847c827ec885 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.737811] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1104.738034] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd33aae6-3218-4219-b9cc-262575cbd98c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.745412] env[62109]: DEBUG oslo_vmware.api [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1104.745412] env[62109]: value = "task-1117234" [ 1104.745412] env[62109]: _type = "Task" [ 1104.745412] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.754378] env[62109]: DEBUG oslo_vmware.api [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117234, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.765892] env[62109]: DEBUG oslo_vmware.api [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143148} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.766179] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1104.766358] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1104.766570] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1104.766814] env[62109]: INFO nova.compute.manager [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Took 2.26 seconds to destroy the instance on the hypervisor. [ 1104.767090] env[62109]: DEBUG oslo.service.loopingcall [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1104.767292] env[62109]: DEBUG nova.compute.manager [-] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1104.767389] env[62109]: DEBUG nova.network.neutron [-] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1104.938757] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117233, 'name': ReconfigVM_Task, 'duration_secs': 0.184001} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.940134] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Reconfigured VM instance instance-00000050 to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1104.941630] env[62109]: DEBUG nova.compute.manager [req-dd8f2415-7ca5-44b8-ac7a-bf9c82ebe009 req-f47cc237-5385-4171-9a0d-8fbb5fc73df6 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Received event network-vif-deleted-fad3649e-36ac-4e54-8d8b-9a77b8da03ee {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1104.941855] env[62109]: INFO nova.compute.manager [req-dd8f2415-7ca5-44b8-ac7a-bf9c82ebe009 req-f47cc237-5385-4171-9a0d-8fbb5fc73df6 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Neutron deleted interface fad3649e-36ac-4e54-8d8b-9a77b8da03ee; detaching it from the instance and deleting it from the info cache [ 1104.942048] env[62109]: DEBUG nova.network.neutron [req-dd8f2415-7ca5-44b8-ac7a-bf9c82ebe009 req-f47cc237-5385-4171-9a0d-8fbb5fc73df6 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.943655] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0eaede3-0492-4c9a-b836-75eec0cf9d9a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.967765] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af/93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1104.968749] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1ffa92c-265d-4c67-b301-3c424d53efcb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.989017] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1104.989017] env[62109]: value = "task-1117235" [ 1104.989017] env[62109]: _type = "Task" [ 1104.989017] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.998540] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117235, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.032845] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.322s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.033183] env[62109]: INFO nova.compute.manager [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Successfully reverted task state from resize_migrating on failure for instance. [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server [None req-bfb980ba-fa88-427d-9c72-03eaf80d3327 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Exception during message handling: AttributeError: 'NoneType' object has no attribute 'key' [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server raise self.value [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1105.042754] env[62109]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server raise self.value [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server raise self.value [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6071, in resize_instance [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server raise self.value [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6068, in resize_instance [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration, [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6105, in _resize_instance [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 264, in migrate_disk_and_power_off [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server return self._vmops.migrate_disk_and_power_off(context, instance, [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1467, in migrate_disk_and_power_off [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server self._resize_disk(instance, vm_ref, vmdk, flavor) [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", 
line 1398, in _resize_disk [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server self._volumeops.detach_disk_from_vm(vm_ref, instance, vmdk.device) [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 121, in detach_disk_from_vm [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server disk_key = device.key [ 1105.042754] env[62109]: ERROR oslo_messaging.rpc.server AttributeError: 'NoneType' object has no attribute 'key' [ 1105.044395] env[62109]: ERROR oslo_messaging.rpc.server [ 1105.224115] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: dce54763-ad3a-40d3-8f72-f0a1aefaf086] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1105.261535] env[62109]: DEBUG oslo_vmware.api [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117234, 'name': PowerOffVM_Task, 'duration_secs': 0.213967} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.263802] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1105.263802] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1105.263802] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e21676ee-a64c-49f9-ba9b-cef12da23703 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.330677] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1105.331210] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1105.331210] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleting the datastore file [datastore2] e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1105.331488] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b8723e8-0463-4669-97bc-1045936c4c0f {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.339930] env[62109]: DEBUG oslo_vmware.api [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1105.339930] env[62109]: value = "task-1117237" [ 1105.339930] env[62109]: _type = "Task" [ 1105.339930] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.349978] env[62109]: DEBUG oslo_vmware.api [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117237, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.399183] env[62109]: DEBUG nova.network.neutron [-] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.449106] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1a5723a0-f425-42db-93f9-da58d02eb095 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.460290] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5911d81b-a63e-429a-a002-91a79bcf4d0e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.497924] env[62109]: DEBUG nova.compute.manager [req-dd8f2415-7ca5-44b8-ac7a-bf9c82ebe009 req-f47cc237-5385-4171-9a0d-8fbb5fc73df6 service nova] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Detach interface failed, port_id=fad3649e-36ac-4e54-8d8b-9a77b8da03ee, reason: Instance c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1105.510045] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117235, 'name': ReconfigVM_Task, 'duration_secs': 0.271934} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.510045] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af/93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1105.510045] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance '93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af' progress to 50 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1105.727326] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: dfebeee8-06be-424b-89b0-7c1a3d4703eb] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1105.855713] env[62109]: DEBUG oslo_vmware.api [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117237, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137936} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.855713] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1105.855713] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1105.855713] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1105.855713] env[62109]: INFO nova.compute.manager [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1105.855713] env[62109]: DEBUG oslo.service.loopingcall [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1105.855713] env[62109]: DEBUG nova.compute.manager [-] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1105.855713] env[62109]: DEBUG nova.network.neutron [-] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1105.903708] env[62109]: INFO nova.compute.manager [-] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Took 2.10 seconds to deallocate network for instance. [ 1106.016132] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6706be-7ecb-4aaf-a293-66ef37265e53 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.051116] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187e3321-eac5-4764-8eef-4de98286865e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.072969] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance '93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af' progress to 67 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1106.233514] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 35a13db2-f645-4634-86e0-7e9a6a24fc66] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1106.255359] env[62109]: DEBUG nova.network.neutron [-] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.459242] env[62109]: INFO nova.compute.manager [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Took 0.56 seconds to detach 1 volumes for instance. 
[ 1106.462646] env[62109]: DEBUG nova.compute.manager [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Deleting volume: 35d2eab5-8592-4a6e-b3bb-80c16e77808d {{(pid=62109) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 1106.587517] env[62109]: DEBUG nova.network.neutron [-] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.698218] env[62109]: DEBUG nova.network.neutron [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Port 14e9f4a1-8980-4de2-88f7-dd0162687351 binding to destination host cpu-1 is already ACTIVE {{(pid=62109) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1106.736685] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 50c93e9e-5af6-489e-ac8a-29b8a6777a4e] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1106.758475] env[62109]: INFO nova.compute.manager [-] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Took 1.99 seconds to deallocate network for instance. [ 1106.980862] env[62109]: DEBUG nova.compute.manager [req-493f3a44-e9b1-4673-9c48-8cc6c7be5233 req-c77141d8-1274-4cc9-b469-0aeb70605b2d service nova] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Received event network-vif-deleted-8b33420f-fd6d-43fc-b4e1-141768c6024b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1106.981238] env[62109]: DEBUG nova.compute.manager [req-493f3a44-e9b1-4673-9c48-8cc6c7be5233 req-c77141d8-1274-4cc9-b469-0aeb70605b2d service nova] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Received event network-vif-deleted-6228f2fe-46b3-4b2e-ac77-0abf0078c9ea {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1107.024472] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.024472] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.024472] env[62109]: DEBUG nova.objects.instance [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lazy-loading 'resources' on Instance uuid c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1107.090858] env[62109]: INFO nova.compute.manager [-] [instance: e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7] Took 1.24 seconds to 
deallocate network for instance. [ 1107.240034] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: a197a73e-32bc-45b0-ae6f-5275cf74285b] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1107.310428] env[62109]: INFO nova.compute.manager [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: bc75898d-7856-4ecb-9640-ec30538fe90f] Took 0.55 seconds to detach 1 volumes for instance. [ 1107.598835] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.670790] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d7c520-9bb9-4adf-921b-47075f6400c8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.679395] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8744e8b2-af2b-4b0f-9073-5e9b6d8398bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.719285] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7ca6cc-90cc-4317-be0c-cc2f36dd0e0b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.728647] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.728949] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.730108] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.736706] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9581a40-f5b2-47f5-8bc9-4a6c5253c5cd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.751462] env[62109]: DEBUG 
nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 6b5a009e-28f5-4be7-8641-089abe359954] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1107.753715] env[62109]: DEBUG nova.compute.provider_tree [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.816958] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.820965] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.821240] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.256209] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 414ac48f-68bc-4d37-98c0-4bcc9f7f37c5] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1108.258542] env[62109]: DEBUG nova.scheduler.client.report [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1108.323208] env[62109]: DEBUG nova.compute.manager [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1108.762918] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: e7e232c4-a2cb-44eb-8ee3-11fc12ee152a] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1108.765269] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.742s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.767615] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.169s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.767842] env[62109]: DEBUG nova.objects.instance [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lazy-loading 'resources' on Instance uuid e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1108.769464] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.769684] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.769822] env[62109]: DEBUG nova.network.neutron [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1108.790080] env[62109]: INFO nova.scheduler.client.report [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleted allocations for instance c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea [ 1108.842489] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.268486] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 5842e112-d3ef-4ce9-91cc-198e68d12422] 
Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1109.296384] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7aa8b062-f34a-4923-ab85-35f1bab26927 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.315s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.297353] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 5.928s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.297652] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.297926] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.298261] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.302897] env[62109]: INFO nova.compute.manager [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Terminating instance [ 1109.307024] env[62109]: DEBUG nova.compute.manager [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1109.307308] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f657008-218b-4930-9e19-201c5556967d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.319589] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df082ed0-51ec-495a-8013-257fd6db7d0b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.352697] env[62109]: WARNING nova.virt.vmwareapi.driver [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea could not be found. [ 1109.352940] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1109.355433] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-045077a5-0348-4eb5-acf6-5e255f9d64d4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.366953] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319c93c5-6082-40cf-bc01-8256fe2fc627 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.397626] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea could not be found. [ 1109.397837] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1109.398031] env[62109]: INFO nova.compute.manager [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Took 0.09 seconds to destroy the instance on the hypervisor. [ 1109.398279] env[62109]: DEBUG oslo.service.loopingcall [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1109.398792] env[62109]: DEBUG nova.compute.manager [-] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1109.398963] env[62109]: DEBUG nova.network.neutron [-] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1109.401466] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6aa2172-09ae-4d21-9102-d6c8206f9fd3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.408484] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ae82c8-5bb0-4314-9a7d-0cf5bb9d4a3b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.442981] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f211f1-8700-40c9-bef2-776e15303781 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.451209] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e948baf-2e38-485e-86f0-786d58611334 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.465249] env[62109]: DEBUG nova.compute.provider_tree [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1109.535623] env[62109]: DEBUG nova.network.neutron [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance_info_cache with network_info: [{"id": "14e9f4a1-8980-4de2-88f7-dd0162687351", "address": "fa:16:3e:3f:3e:8b", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14e9f4a1-89", "ovs_interfaceid": "14e9f4a1-8980-4de2-88f7-dd0162687351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.772389] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 39c17e34-c8c0-4a66-8d22-717efcb984bc] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1109.945936] env[62109]: DEBUG nova.network.neutron [-] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.967517] env[62109]: DEBUG nova.scheduler.client.report [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1110.038709] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1110.276480] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 128ae6c9-1f82-4c67-83be-42cb554c2fd3] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1110.448862] env[62109]: INFO nova.compute.manager [-] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Took 1.05 seconds to deallocate network for instance. [ 1110.463255] env[62109]: WARNING nova.volume.cinder [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Attachment 06d4260e-5285-4e0b-a2bc-b1222f9126c6 does not exist. Ignoring.: cinderclient.exceptions.NotFound: Volume attachment could not be found with filter: attachment_id = 06d4260e-5285-4e0b-a2bc-b1222f9126c6. (HTTP 404) (Request-ID: req-03144345-4ab4-44d3-8b0c-caf0bfb2ddf5) [ 1110.463547] env[62109]: INFO nova.compute.manager [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Took 0.01 seconds to detach 1 volumes for instance. 
[ 1110.465663] env[62109]: DEBUG nova.compute.manager [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Deleting volume: 35d2eab5-8592-4a6e-b3bb-80c16e77808d {{(pid=62109) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 1110.472177] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.705s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.474122] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.657s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.474349] env[62109]: DEBUG nova.objects.instance [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'resources' on Instance uuid bc75898d-7856-4ecb-9640-ec30538fe90f {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.479918] env[62109]: WARNING nova.compute.manager [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Failed to delete volume: 35d2eab5-8592-4a6e-b3bb-80c16e77808d due to Volume 35d2eab5-8592-4a6e-b3bb-80c16e77808d could not be found.: nova.exception.VolumeNotFound: Volume 35d2eab5-8592-4a6e-b3bb-80c16e77808d could not be found. 
[ 1110.494637] env[62109]: INFO nova.scheduler.client.report [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleted allocations for instance e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7 [ 1110.558863] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdb4c92-504e-46e4-9ab7-66f85e8c8944 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.579781] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e3a229-e39b-427e-88c3-cbb0893f067c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.586948] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance '93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af' progress to 83 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1110.780133] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: f91f4482-b18d-4883-9f6b-3bc5a386eedd] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1110.984443] env[62109]: INFO nova.compute.manager [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea] Instance disappeared during terminate [ 1110.984443] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3e4af113-19d0-4e2d-899e-8236143ebf70 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "c0a7ed4e-41b5-4b71-b3f7-92d82bf2c0ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 1.687s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.002139] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e630733a-19ee-4461-9633-782a78aef702 tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "e2a6ec26-8e00-45f7-b109-86bd8ca3c7e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.289s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.074477] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8953354-f582-4298-a7d1-b7d3604f0a6e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.082991] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbb92ea-ff2a-403e-81c0-1e0e306ae52f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.114204] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] 
Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1111.114725] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b946cddf-c704-4fb0-838b-73c0d6f95a1a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.116779] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf7e473-708e-4370-866a-7dfc9ab5d626 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.124772] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb12b39-8d8a-4297-a8b6-e4ceda3b0f66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.129283] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1111.129283] env[62109]: value = "task-1117240" [ 1111.129283] env[62109]: _type = "Task" [ 1111.129283] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.141366] env[62109]: DEBUG nova.compute.provider_tree [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1111.148451] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117240, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.284486] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 5c7dbe04-5027-49cd-a478-79046fee1f16] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1111.639836] env[62109]: DEBUG oslo_vmware.api [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117240, 'name': PowerOnVM_Task, 'duration_secs': 0.416507} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.640176] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1111.640382] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d53196af-c4f3-4632-b5fd-43879d1ecb20 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance '93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af' progress to 100 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1111.644692] env[62109]: DEBUG nova.scheduler.client.report [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1111.788067] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 448371eb-c1dd-4d7b-b946-aaf6c3a3a36d] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1112.152691] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.678s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.155587] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.313s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.156633] env[62109]: INFO nova.compute.claims [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1112.177533] env[62109]: INFO nova.scheduler.client.report [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleted allocations for instance bc75898d-7856-4ecb-9640-ec30538fe90f [ 1112.291124] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 6f31405e-a766-46da-8bf9-7be37a323bf3] Instance has had 0 of 5 cleanup attempts 
{{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1112.684639] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65379901-07c8-4a77-ae1b-72d2801df5bc tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "bc75898d-7856-4ecb-9640-ec30538fe90f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.188s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.794757] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 3e641c90-2358-4a1c-9af5-6ad96f722aba] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1113.265037] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b8d5ac-87c2-42de-a03c-2dbab8a8d217 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.273796] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaff16d5-5472-4e6f-85cd-73aaf9b99dad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.305655] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: ac068268-1243-466e-8cd5-1ee2bc248ecd] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1113.308846] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c7db52-4545-4318-a04e-73c5d8033693 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.317486] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a09d127-3cb4-4420-9029-079f9432baf2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.331961] env[62109]: DEBUG nova.compute.provider_tree [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1113.675872] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1113.676168] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1113.676370] env[62109]: DEBUG nova.compute.manager [None 
req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Going to confirm migration 8 {{(pid=62109) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1113.812191] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 66bbe1e6-e5ee-46a0-b95c-449eef636509] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1113.835697] env[62109]: DEBUG nova.scheduler.client.report [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1114.116808] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "c7a95d76-b143-45ce-87b3-de0b63e53169" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.117052] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.238697] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1114.238943] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.239163] env[62109]: DEBUG nova.network.neutron [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1114.239361] env[62109]: DEBUG nova.objects.instance [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 
tempest-ServerActionsTestJSON-811768397-project-member] Lazy-loading 'info_cache' on Instance uuid 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1114.315991] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 5bea4229-6182-445e-b569-e7413ce92b93] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1114.342844] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.188s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.343476] env[62109]: DEBUG nova.compute.manager [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1114.395957] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.396231] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.396441] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.396625] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.396797] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.398889] env[62109]: INFO nova.compute.manager [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Terminating instance [ 1114.400703] env[62109]: DEBUG nova.compute.manager [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1114.400921] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1114.401822] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2077fb81-4e29-48b9-b597-d77c2879a514 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.410335] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1114.410673] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-596210c4-ace5-42fa-a17d-26ba74f6442f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.418456] env[62109]: DEBUG oslo_vmware.api [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1114.418456] env[62109]: value = "task-1117241" [ 1114.418456] env[62109]: _type = "Task" [ 1114.418456] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.427047] env[62109]: DEBUG oslo_vmware.api [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117241, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.619390] env[62109]: DEBUG nova.compute.manager [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1114.819026] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 0392a352-74e5-4551-9319-eebbc5e20d3b] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1114.847933] env[62109]: DEBUG nova.compute.utils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1114.849373] env[62109]: DEBUG nova.compute.manager [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1114.849544] env[62109]: DEBUG nova.network.neutron [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1114.899921] env[62109]: DEBUG nova.policy [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73cb2c252e3f4e68a2767b349e0917e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df642ff4b7f247d09f80b260ed9ef53f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1114.928606] env[62109]: DEBUG oslo_vmware.api [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117241, 'name': PowerOffVM_Task, 'duration_secs': 0.212263} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.928890] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1114.929084] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1114.929343] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0c25b87-e95a-4cc9-acdd-64486962b4ca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.990014] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1114.990262] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1114.990472] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleting the datastore file [datastore2] 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1114.990771] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9b273d0-4a83-49e8-b90f-36bfbec25250 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.999154] env[62109]: DEBUG oslo_vmware.api [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for the task: (returnval){ [ 1114.999154] env[62109]: value = "task-1117243" [ 1114.999154] env[62109]: _type = "Task" [ 1114.999154] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.007322] env[62109]: DEBUG oslo_vmware.api [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117243, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.148208] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.148561] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.150283] env[62109]: INFO nova.compute.claims [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1115.154779] env[62109]: DEBUG nova.network.neutron [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Successfully created port: 0674b2a4-d58e-4dcb-b770-308e0b503998 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1115.321891] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 0f197e98-9630-4928-8707-56bbf6c1e5a1] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1115.353051] env[62109]: DEBUG nova.compute.manager [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1115.511661] env[62109]: DEBUG oslo_vmware.api [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Task: {'id': task-1117243, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190238} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.511983] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1115.512202] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1115.512397] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1115.512670] env[62109]: INFO nova.compute.manager [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1115.512877] env[62109]: DEBUG oslo.service.loopingcall [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1115.515589] env[62109]: DEBUG nova.compute.manager [-] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1115.515725] env[62109]: DEBUG nova.network.neutron [-] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1115.554341] env[62109]: DEBUG nova.network.neutron [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance_info_cache with network_info: [{"id": "14e9f4a1-8980-4de2-88f7-dd0162687351", "address": "fa:16:3e:3f:3e:8b", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14e9f4a1-89", "ovs_interfaceid": "14e9f4a1-8980-4de2-88f7-dd0162687351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.826849] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: d7d1029c-9b7c-4bd7-b606-a1962a129461] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1115.833287] env[62109]: DEBUG nova.compute.manager [req-4446d4b7-691b-4697-b814-f91b978791e0 req-2b82e66c-bc6a-4aab-8bf4-92c20936222f service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Received event network-vif-deleted-a43481f3-cacf-4bd2-9e74-4ca60b37da80 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1115.833500] env[62109]: INFO nova.compute.manager [req-4446d4b7-691b-4697-b814-f91b978791e0 req-2b82e66c-bc6a-4aab-8bf4-92c20936222f service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Neutron deleted interface a43481f3-cacf-4bd2-9e74-4ca60b37da80; detaching it from the instance and deleting it from the info cache [ 1115.833678] env[62109]: DEBUG nova.network.neutron [req-4446d4b7-691b-4697-b814-f91b978791e0 req-2b82e66c-bc6a-4aab-8bf4-92c20936222f service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.057528] env[62109]: DEBUG 
oslo_concurrency.lockutils [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1116.057877] env[62109]: DEBUG nova.objects.instance [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lazy-loading 'migration_context' on Instance uuid 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1116.252275] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0cf725-f43f-4b21-9eec-57eaed82379a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.260692] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5975e0d3-abe3-4631-97f8-40b7d34c017e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.290363] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ef7b04-07e2-4995-bd00-10e42d516b08 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.297314] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13291fd9-735e-44bb-884e-b0493f365bce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.305667] env[62109]: DEBUG nova.network.neutron [-] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.314341] env[62109]: DEBUG nova.compute.provider_tree [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.333690] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: b1321874-8f97-4444-9f9c-d586d51a9e92] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1116.336478] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cd8849de-bbd8-4059-8b9a-e36bedfc8e79 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.346129] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9b55f5-4890-4c87-8bf3-bcdf2ef2e842 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.361521] env[62109]: DEBUG nova.compute.manager [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1116.371669] env[62109]: DEBUG nova.compute.manager [req-4446d4b7-691b-4697-b814-f91b978791e0 req-2b82e66c-bc6a-4aab-8bf4-92c20936222f service nova] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Detach interface failed, port_id=a43481f3-cacf-4bd2-9e74-4ca60b37da80, reason: Instance 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1116.385737] env[62109]: DEBUG nova.virt.hardware [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1116.386062] env[62109]: DEBUG nova.virt.hardware [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1116.386231] env[62109]: DEBUG nova.virt.hardware [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1116.386423] env[62109]: DEBUG nova.virt.hardware [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1116.386576] env[62109]: DEBUG nova.virt.hardware [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1116.386731] env[62109]: DEBUG nova.virt.hardware [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1116.386939] env[62109]: DEBUG nova.virt.hardware [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1116.387118] env[62109]: DEBUG 
nova.virt.hardware [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1116.387331] env[62109]: DEBUG nova.virt.hardware [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1116.387520] env[62109]: DEBUG nova.virt.hardware [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1116.387722] env[62109]: DEBUG nova.virt.hardware [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1116.388533] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03262c3-f159-4a2a-b06b-d27fdfb2dffa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.396240] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4a3072-baf3-4f1c-9feb-7ace0a77fe82 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.560864] env[62109]: DEBUG nova.objects.base [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Object Instance<93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af> lazy-loaded attributes: info_cache,migration_context {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1116.562124] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f896bd61-bf25-4069-90c6-29b72ba321e9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.580297] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f62fcdf-dbbc-4c40-b26d-62ad765d2e8f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.585559] env[62109]: DEBUG oslo_vmware.api [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1116.585559] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5243ef79-c4bf-5800-e4bc-a0c06b75e455" [ 1116.585559] env[62109]: _type = "Task" [ 1116.585559] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.594594] env[62109]: DEBUG oslo_vmware.api [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5243ef79-c4bf-5800-e4bc-a0c06b75e455, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.676231] env[62109]: DEBUG nova.compute.manager [req-d1d74f1d-0ac0-437a-87cb-4c2bad64f440 req-40d63a9c-3783-4f24-9e97-55c0b2735406 service nova] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Received event network-vif-plugged-0674b2a4-d58e-4dcb-b770-308e0b503998 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1116.676231] env[62109]: DEBUG oslo_concurrency.lockutils [req-d1d74f1d-0ac0-437a-87cb-4c2bad64f440 req-40d63a9c-3783-4f24-9e97-55c0b2735406 service nova] Acquiring lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.676231] env[62109]: DEBUG oslo_concurrency.lockutils [req-d1d74f1d-0ac0-437a-87cb-4c2bad64f440 req-40d63a9c-3783-4f24-9e97-55c0b2735406 service nova] Lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.676542] env[62109]: DEBUG oslo_concurrency.lockutils [req-d1d74f1d-0ac0-437a-87cb-4c2bad64f440 req-40d63a9c-3783-4f24-9e97-55c0b2735406 service nova] Lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.676580] env[62109]: DEBUG nova.compute.manager [req-d1d74f1d-0ac0-437a-87cb-4c2bad64f440 req-40d63a9c-3783-4f24-9e97-55c0b2735406 service nova] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] No waiting events found dispatching network-vif-plugged-0674b2a4-d58e-4dcb-b770-308e0b503998 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1116.676762] env[62109]: WARNING nova.compute.manager [req-d1d74f1d-0ac0-437a-87cb-4c2bad64f440 req-40d63a9c-3783-4f24-9e97-55c0b2735406 service nova] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Received unexpected event network-vif-plugged-0674b2a4-d58e-4dcb-b770-308e0b503998 for instance with vm_state building and task_state spawning. [ 1116.766632] env[62109]: DEBUG nova.network.neutron [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Successfully updated port: 0674b2a4-d58e-4dcb-b770-308e0b503998 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1116.816350] env[62109]: INFO nova.compute.manager [-] [instance: 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba] Took 1.30 seconds to deallocate network for instance. 
[ 1116.817213] env[62109]: DEBUG nova.scheduler.client.report [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1116.835386] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 58365fb6-a38e-4afa-be36-3cdcdbdbc2b4] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1116.913959] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.914237] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.914419] env[62109]: INFO nova.compute.manager [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Shelving [ 1117.095673] env[62109]: DEBUG oslo_vmware.api [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5243ef79-c4bf-5800-e4bc-a0c06b75e455, 'name': SearchDatastore_Task, 'duration_secs': 0.007685} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.095982] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.269079] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "refresh_cache-8a64a700-e381-49a0-89ae-8a678ed7a4fb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1117.269366] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "refresh_cache-8a64a700-e381-49a0-89ae-8a678ed7a4fb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.269628] env[62109]: DEBUG nova.network.neutron [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1117.322623] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.174s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.323186] env[62109]: DEBUG nova.compute.manager [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1117.325991] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.230s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.328459] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.337802] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 3da7aca9-5d65-4f5e-b0a3-7cf5308f0384] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1117.421489] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1117.421764] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f68d0cc-aeb6-402a-8407-c2f9d3e067f1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.429172] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1117.429172] env[62109]: value = "task-1117244" [ 1117.429172] env[62109]: _type = "Task" [ 1117.429172] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.437527] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117244, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.800386] env[62109]: DEBUG nova.network.neutron [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1117.832465] env[62109]: DEBUG nova.compute.utils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1117.833999] env[62109]: DEBUG nova.compute.manager [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1117.834192] env[62109]: DEBUG nova.network.neutron [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1117.839829] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 7ace6356-1a81-4095-8286-c9b6d829062b] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1117.929367] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765be090-936e-443e-8261-52b9cad4542b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.944395] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d1f6f9-c8a0-4de7-88e8-1041c189a567 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.947449] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117244, 'name': PowerOffVM_Task, 'duration_secs': 0.215737} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.948349] env[62109]: DEBUG nova.network.neutron [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Updating instance_info_cache with network_info: [{"id": "0674b2a4-d58e-4dcb-b770-308e0b503998", "address": "fa:16:3e:3c:c4:ad", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0674b2a4-d5", "ovs_interfaceid": "0674b2a4-d58e-4dcb-b770-308e0b503998", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.949572] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1117.950993] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b941b97c-ed83-45d3-801c-95e3519ca45c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.980595] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26fcbb1-d139-48b6-95a6-47aef5374b7c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.997647] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a38915-79d9-45c9-80f6-6d9540cabe52 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.003539] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e87449-1b25-4fd4-93f0-a616dba494a3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.021930] env[62109]: DEBUG nova.compute.provider_tree [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1118.094520] env[62109]: DEBUG nova.policy [None 
req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5442deec924240babb834fc704d53cd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a363548894df47d5981199004e9884de', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1118.337591] env[62109]: DEBUG nova.compute.manager [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1118.344102] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 5d656f91-d35f-45e1-8892-7cdacd306960] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1118.391549] env[62109]: DEBUG nova.network.neutron [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Successfully created port: c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1118.479674] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "refresh_cache-8a64a700-e381-49a0-89ae-8a678ed7a4fb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1118.480020] env[62109]: DEBUG nova.compute.manager [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Instance network_info: |[{"id": "0674b2a4-d58e-4dcb-b770-308e0b503998", "address": "fa:16:3e:3c:c4:ad", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0674b2a4-d5", "ovs_interfaceid": "0674b2a4-d58e-4dcb-b770-308e0b503998", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1118.480516] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:c4:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbe1725d-6711-4e92-9a4e-d4802651e7d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0674b2a4-d58e-4dcb-b770-308e0b503998', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1118.487956] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Creating folder: Project (df642ff4b7f247d09f80b260ed9ef53f). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1118.488265] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0561f6ef-ba87-47f3-a26c-15a6d7c1a825 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.499582] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Created folder: Project (df642ff4b7f247d09f80b260ed9ef53f) in parent group-v244329. [ 1118.499787] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Creating folder: Instances. Parent ref: group-v244552. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1118.500045] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c10d4a7-fdc6-4a7a-9564-daa727f3c6af {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.509533] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Created folder: Instances in parent group-v244552. [ 1118.509771] env[62109]: DEBUG oslo.service.loopingcall [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1118.509963] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1118.510205] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2bfbf73-2411-4317-9dff-1b604222f39e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.525506] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1118.526338] env[62109]: DEBUG nova.scheduler.client.report [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1118.529334] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d8a4d266-49f1-44c0-bcc1-1ed66ced362b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.536250] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1118.536250] env[62109]: value = "task-1117247" [ 1118.536250] env[62109]: _type = "Task" [ 1118.536250] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.537140] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1118.537140] env[62109]: value = "task-1117248" [ 1118.537140] env[62109]: _type = "Task" [ 1118.537140] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.553286] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117248, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.553426] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117247, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.701324] env[62109]: DEBUG nova.compute.manager [req-d7530820-c6e8-4c09-8f9d-4babe5352249 req-ef6c4593-4efa-4968-9447-5cbfdf881568 service nova] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Received event network-changed-0674b2a4-d58e-4dcb-b770-308e0b503998 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1118.701696] env[62109]: DEBUG nova.compute.manager [req-d7530820-c6e8-4c09-8f9d-4babe5352249 req-ef6c4593-4efa-4968-9447-5cbfdf881568 service nova] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Refreshing instance network info cache due to event network-changed-0674b2a4-d58e-4dcb-b770-308e0b503998. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1118.702074] env[62109]: DEBUG oslo_concurrency.lockutils [req-d7530820-c6e8-4c09-8f9d-4babe5352249 req-ef6c4593-4efa-4968-9447-5cbfdf881568 service nova] Acquiring lock "refresh_cache-8a64a700-e381-49a0-89ae-8a678ed7a4fb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1118.702350] env[62109]: DEBUG oslo_concurrency.lockutils [req-d7530820-c6e8-4c09-8f9d-4babe5352249 req-ef6c4593-4efa-4968-9447-5cbfdf881568 service nova] Acquired lock "refresh_cache-8a64a700-e381-49a0-89ae-8a678ed7a4fb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.702662] env[62109]: DEBUG nova.network.neutron [req-d7530820-c6e8-4c09-8f9d-4babe5352249 req-ef6c4593-4efa-4968-9447-5cbfdf881568 service nova] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Refreshing network info cache for port 0674b2a4-d58e-4dcb-b770-308e0b503998 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1118.848020] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: af3465db-fd56-458d-a499-14df3a0029f0] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1119.051555] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117248, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.051722] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117247, 'name': CreateVM_Task, 'duration_secs': 0.324705} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.052503] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1119.053188] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1119.053359] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.053684] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1119.054174] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f01edb41-89c8-4155-b892-75540cd87bb0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.058126] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1119.058126] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525568e1-c49b-175b-64e5-91f76f8ed0a1" [ 1119.058126] env[62109]: _type = "Task" [ 1119.058126] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.065225] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525568e1-c49b-175b-64e5-91f76f8ed0a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.348153] env[62109]: DEBUG nova.compute.manager [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1119.350415] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: c694c178-3894-4997-8e99-8f4900a64848] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1119.373767] env[62109]: DEBUG nova.virt.hardware [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1119.374081] env[62109]: DEBUG nova.virt.hardware [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1119.374201] env[62109]: DEBUG nova.virt.hardware [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1119.374389] env[62109]: DEBUG nova.virt.hardware [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1119.374542] env[62109]: DEBUG nova.virt.hardware [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1119.374697] env[62109]: DEBUG nova.virt.hardware [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1119.374976] env[62109]: DEBUG nova.virt.hardware [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1119.375086] env[62109]: DEBUG nova.virt.hardware [None 
req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1119.375263] env[62109]: DEBUG nova.virt.hardware [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1119.375430] env[62109]: DEBUG nova.virt.hardware [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1119.375607] env[62109]: DEBUG nova.virt.hardware [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1119.376474] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1700f6-06f0-4c42-a892-5cd7be8759da {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.385606] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c629ee-d777-429a-af11-64c3639ed2ac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.390046] env[62109]: DEBUG nova.network.neutron [req-d7530820-c6e8-4c09-8f9d-4babe5352249 req-ef6c4593-4efa-4968-9447-5cbfdf881568 service nova] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Updated VIF entry in instance network info cache for port 0674b2a4-d58e-4dcb-b770-308e0b503998. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1119.390217] env[62109]: DEBUG nova.network.neutron [req-d7530820-c6e8-4c09-8f9d-4babe5352249 req-ef6c4593-4efa-4968-9447-5cbfdf881568 service nova] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Updating instance_info_cache with network_info: [{"id": "0674b2a4-d58e-4dcb-b770-308e0b503998", "address": "fa:16:3e:3c:c4:ad", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0674b2a4-d5", "ovs_interfaceid": "0674b2a4-d58e-4dcb-b770-308e0b503998", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.537879] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.212s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1119.540822] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.213s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.541068] env[62109]: DEBUG nova.objects.instance [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lazy-loading 'resources' on Instance uuid 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1119.551618] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117248, 'name': CreateSnapshot_Task, 'duration_secs': 0.639627} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.551897] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1119.552652] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5fcdce-f048-4298-a772-0801adc78490 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.572474] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525568e1-c49b-175b-64e5-91f76f8ed0a1, 'name': SearchDatastore_Task, 'duration_secs': 0.009889} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.572677] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.572928] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1119.573224] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1119.573401] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.573592] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1119.573836] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc3de4c6-5bf2-4111-b2a0-f16e3fb6c6cd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.581586] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d 
tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1119.581764] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1119.582439] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0f6a484-4eea-462c-811f-f9a8ea589e29 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.587485] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1119.587485] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52106136-9963-fe2d-8a94-7e898f3caa3b" [ 1119.587485] env[62109]: _type = "Task" [ 1119.587485] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.594711] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52106136-9963-fe2d-8a94-7e898f3caa3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.785282] env[62109]: DEBUG nova.compute.manager [req-256a9ae9-61f1-4853-873f-ef3039ed3a9d req-ff6d0192-3c27-4f46-a2c4-9523471a0dd2 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Received event network-vif-plugged-c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1119.785521] env[62109]: DEBUG oslo_concurrency.lockutils [req-256a9ae9-61f1-4853-873f-ef3039ed3a9d req-ff6d0192-3c27-4f46-a2c4-9523471a0dd2 service nova] Acquiring lock "c7a95d76-b143-45ce-87b3-de0b63e53169-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.785716] env[62109]: DEBUG oslo_concurrency.lockutils [req-256a9ae9-61f1-4853-873f-ef3039ed3a9d req-ff6d0192-3c27-4f46-a2c4-9523471a0dd2 service nova] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.785895] env[62109]: DEBUG oslo_concurrency.lockutils [req-256a9ae9-61f1-4853-873f-ef3039ed3a9d req-ff6d0192-3c27-4f46-a2c4-9523471a0dd2 service nova] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1119.786124] env[62109]: DEBUG nova.compute.manager [req-256a9ae9-61f1-4853-873f-ef3039ed3a9d req-ff6d0192-3c27-4f46-a2c4-9523471a0dd2 service nova] [instance: 
c7a95d76-b143-45ce-87b3-de0b63e53169] No waiting events found dispatching network-vif-plugged-c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1119.786303] env[62109]: WARNING nova.compute.manager [req-256a9ae9-61f1-4853-873f-ef3039ed3a9d req-ff6d0192-3c27-4f46-a2c4-9523471a0dd2 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Received unexpected event network-vif-plugged-c190fa8c-8d46-43de-943d-554e47a2fe5c for instance with vm_state building and task_state spawning. [ 1119.855780] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 342b7069-22fb-4934-9ec3-8ecbc987696e] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1119.895316] env[62109]: DEBUG oslo_concurrency.lockutils [req-d7530820-c6e8-4c09-8f9d-4babe5352249 req-ef6c4593-4efa-4968-9447-5cbfdf881568 service nova] Releasing lock "refresh_cache-8a64a700-e381-49a0-89ae-8a678ed7a4fb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.921356] env[62109]: DEBUG nova.network.neutron [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Successfully updated port: c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1120.073484] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1120.074261] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fb94d360-40a2-47a2-b1c2-d18fb949878f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.083174] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1120.083174] env[62109]: value = "task-1117249" [ 1120.083174] env[62109]: _type = "Task" [ 1120.083174] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.098290] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117249, 'name': CloneVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.103297] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52106136-9963-fe2d-8a94-7e898f3caa3b, 'name': SearchDatastore_Task, 'duration_secs': 0.00822} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.104453] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bd01027-f32e-4ad5-babb-245eeee038f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.111050] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1120.111050] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5265161b-fa47-ed45-2c68-c5da9337ae6d" [ 1120.111050] env[62109]: _type = "Task" [ 1120.111050] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.118253] env[62109]: INFO nova.scheduler.client.report [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleted allocation for migration a0f33392-7def-460f-9d25-e2e9a51f7580 [ 1120.125631] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5265161b-fa47-ed45-2c68-c5da9337ae6d, 'name': SearchDatastore_Task, 'duration_secs': 0.009642} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.126120] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1120.126379] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 8a64a700-e381-49a0-89ae-8a678ed7a4fb/8a64a700-e381-49a0-89ae-8a678ed7a4fb.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1120.128863] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55c2e80e-cdf2-472a-a586-222f36bad6a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.135281] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1120.135281] env[62109]: value = "task-1117250" [ 1120.135281] env[62109]: _type = "Task" [ 1120.135281] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.146666] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117250, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.166646] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ec8736-5767-4167-a2a7-87ecafb78dd6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.174853] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f702c2d1-76cd-4611-925e-8d8c94ae7a70 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.208927] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57e52a1-f174-4712-83cc-f5979a691c24 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.216884] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff76dbf-66c1-4b8e-9ac8-bd862e32d202 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.231050] env[62109]: DEBUG nova.compute.provider_tree [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1120.361264] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 2798a8dd-71e9-4d75-ae5f-f0f1b18a2ca8] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1120.425100] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1120.425419] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.425699] env[62109]: DEBUG nova.network.neutron [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1120.595647] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117249, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.627994] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1266f6c2-4bfc-4702-a33c-209d31ffbd2e tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.952s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.645553] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117250, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469202} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.645827] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 8a64a700-e381-49a0-89ae-8a678ed7a4fb/8a64a700-e381-49a0-89ae-8a678ed7a4fb.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1120.646060] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1120.646311] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-035b2d5d-13dc-447c-8367-421c4fa8246c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.654315] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1120.654315] env[62109]: value = "task-1117251" [ 1120.654315] env[62109]: _type = "Task" [ 1120.654315] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.662988] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117251, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.734677] env[62109]: DEBUG nova.scheduler.client.report [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1120.863171] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 7afbb35b-9865-40a7-8b37-d6a661a186a9] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1120.936296] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.936558] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.936767] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.936955] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.937149] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.939165] env[62109]: INFO nova.compute.manager [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 
tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Terminating instance [ 1120.943934] env[62109]: DEBUG nova.compute.manager [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1120.944320] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1120.945310] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97d7e8f-9422-414b-8a03-8aa29aa51b6e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.953079] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1120.953439] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f665dfb-e21b-412e-9a12-7affbbd2edc7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.959997] env[62109]: DEBUG oslo_vmware.api [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1120.959997] env[62109]: value = "task-1117252" [ 1120.959997] env[62109]: _type = "Task" [ 1120.959997] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.969268] env[62109]: DEBUG oslo_vmware.api [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117252, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.983530] env[62109]: DEBUG nova.network.neutron [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1121.095050] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117249, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.152613] env[62109]: DEBUG nova.network.neutron [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updating instance_info_cache with network_info: [{"id": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "address": "fa:16:3e:2e:75:e5", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc190fa8c-8d", "ovs_interfaceid": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.167919] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117251, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063862} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.168272] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1121.169110] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000124b8-525d-444f-9d56-6fb51ce088b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.193635] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 8a64a700-e381-49a0-89ae-8a678ed7a4fb/8a64a700-e381-49a0-89ae-8a678ed7a4fb.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.194263] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-579e9d3c-a29f-4c11-a312-499426f6d587 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.213612] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1121.213612] env[62109]: value = "task-1117253" [ 1121.213612] env[62109]: _type = "Task" [ 1121.213612] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.221411] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117253, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.239843] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.699s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.261500] env[62109]: INFO nova.scheduler.client.report [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Deleted allocations for instance 751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba [ 1121.366291] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 9b2968bb-ed06-4740-b43e-b4aa1fac76dd] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1121.469860] env[62109]: DEBUG oslo_vmware.api [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117252, 'name': PowerOffVM_Task, 'duration_secs': 0.224283} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.470324] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1121.470573] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1121.470853] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fad6d0d2-6481-42c9-8793-04848f9b253d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.545545] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1121.545713] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1121.545835] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleting the datastore file [datastore1] 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af {{(pid=62109) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1121.546125] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d299ac7e-3343-4fb4-9342-1b319d4a856f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.552243] env[62109]: DEBUG oslo_vmware.api [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1121.552243] env[62109]: value = "task-1117255" [ 1121.552243] env[62109]: _type = "Task" [ 1121.552243] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.559952] env[62109]: DEBUG oslo_vmware.api [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117255, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.596013] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117249, 'name': CloneVM_Task, 'duration_secs': 1.048149} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.596293] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Created linked-clone VM from snapshot [ 1121.597014] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25afe1cb-c07d-4034-a236-5caadd7b6e76 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.604350] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Uploading image dcff6ca5-4589-4dff-8ead-c334a5a65018 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1121.630921] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1121.630921] env[62109]: value = "vm-244556" [ 1121.630921] env[62109]: _type = "VirtualMachine" [ 1121.630921] env[62109]: }. 
{{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1121.631279] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-89d96fac-5371-4600-8224-2ef849a9d718 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.638097] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lease: (returnval){ [ 1121.638097] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5236119a-4a25-a53e-d1e3-661189e18d49" [ 1121.638097] env[62109]: _type = "HttpNfcLease" [ 1121.638097] env[62109]: } obtained for exporting VM: (result){ [ 1121.638097] env[62109]: value = "vm-244556" [ 1121.638097] env[62109]: _type = "VirtualMachine" [ 1121.638097] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1121.638428] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the lease: (returnval){ [ 1121.638428] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5236119a-4a25-a53e-d1e3-661189e18d49" [ 1121.638428] env[62109]: _type = "HttpNfcLease" [ 1121.638428] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1121.644610] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1121.644610] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5236119a-4a25-a53e-d1e3-661189e18d49" [ 1121.644610] env[62109]: _type = "HttpNfcLease" [ 1121.644610] env[62109]: } is initializing. 
{{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1121.655568] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1121.655889] env[62109]: DEBUG nova.compute.manager [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Instance network_info: |[{"id": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "address": "fa:16:3e:2e:75:e5", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc190fa8c-8d", "ovs_interfaceid": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1121.656307] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:75:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb224918-e294-4b76-80f9-2fa0031b7dc2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c190fa8c-8d46-43de-943d-554e47a2fe5c', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1121.663857] env[62109]: DEBUG oslo.service.loopingcall [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1121.664094] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1121.664312] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-afff8597-8537-4111-9104-fd77d0d21751 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.684831] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1121.684831] env[62109]: value = "task-1117257" [ 1121.684831] env[62109]: _type = "Task" [ 1121.684831] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.692672] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117257, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.722830] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117253, 'name': ReconfigVM_Task, 'duration_secs': 0.26896} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.723133] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 8a64a700-e381-49a0-89ae-8a678ed7a4fb/8a64a700-e381-49a0-89ae-8a678ed7a4fb.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.723792] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30566aa4-2a83-403f-ad06-2c157385191e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.730157] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1121.730157] env[62109]: value = "task-1117258" [ 1121.730157] env[62109]: _type = "Task" [ 1121.730157] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.739602] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117258, 'name': Rename_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.770154] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433f3c1a-a547-4bb1-9259-acfc9a836cad tempest-ServerActionsTestOtherA-1089353556 tempest-ServerActionsTestOtherA-1089353556-project-member] Lock "751bfb4c-7c9d-42bc-8cf1-f32b5928d8ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.374s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.813599] env[62109]: DEBUG nova.compute.manager [req-ed7feaf2-4e33-42c2-be59-9cfbbdaeb0d9 req-b8b947ea-ca14-4ff1-98ea-bb80f1dbe841 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Received event network-changed-c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1121.813887] env[62109]: DEBUG nova.compute.manager [req-ed7feaf2-4e33-42c2-be59-9cfbbdaeb0d9 req-b8b947ea-ca14-4ff1-98ea-bb80f1dbe841 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Refreshing instance network info cache due to event network-changed-c190fa8c-8d46-43de-943d-554e47a2fe5c. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1121.814141] env[62109]: DEBUG oslo_concurrency.lockutils [req-ed7feaf2-4e33-42c2-be59-9cfbbdaeb0d9 req-b8b947ea-ca14-4ff1-98ea-bb80f1dbe841 service nova] Acquiring lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.814389] env[62109]: DEBUG oslo_concurrency.lockutils [req-ed7feaf2-4e33-42c2-be59-9cfbbdaeb0d9 req-b8b947ea-ca14-4ff1-98ea-bb80f1dbe841 service nova] Acquired lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.814627] env[62109]: DEBUG nova.network.neutron [req-ed7feaf2-4e33-42c2-be59-9cfbbdaeb0d9 req-b8b947ea-ca14-4ff1-98ea-bb80f1dbe841 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Refreshing network info cache for port c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1121.870587] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: c5c63ece-611d-45d1-a8e6-9327700f1563] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1122.063232] env[62109]: DEBUG oslo_vmware.api [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117255, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146006} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.063465] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1122.063661] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1122.063843] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1122.064038] env[62109]: INFO nova.compute.manager [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1122.064298] env[62109]: DEBUG oslo.service.loopingcall [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1122.064497] env[62109]: DEBUG nova.compute.manager [-] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1122.064593] env[62109]: DEBUG nova.network.neutron [-] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1122.146834] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1122.146834] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5236119a-4a25-a53e-d1e3-661189e18d49" [ 1122.146834] env[62109]: _type = "HttpNfcLease" [ 1122.146834] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1122.147123] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1122.147123] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5236119a-4a25-a53e-d1e3-661189e18d49" [ 1122.147123] env[62109]: _type = "HttpNfcLease" [ 1122.147123] env[62109]: }. 
{{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1122.147843] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b9653c-62f0-4cae-8037-102648b7135c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.155707] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235415d-a07d-3858-606a-f3e3c5363cb3/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1122.155886] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235415d-a07d-3858-606a-f3e3c5363cb3/disk-0.vmdk for reading. {{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1122.221240] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117257, 'name': CreateVM_Task, 'duration_secs': 0.376792} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.221416] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1122.222059] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1122.222230] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.222551] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1122.222817] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ecc0398-50ce-42b8-84b1-77cc09365483 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.227169] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1122.227169] env[62109]: value = 
"session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5241e364-6416-4a17-798d-fdbc3d946f75" [ 1122.227169] env[62109]: _type = "Task" [ 1122.227169] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.235930] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5241e364-6416-4a17-798d-fdbc3d946f75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.240631] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117258, 'name': Rename_Task, 'duration_secs': 0.151226} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.240891] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1122.241133] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-171607f0-a9dd-4c06-9aa8-53e19b9dc3c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.246104] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-35fdd2b1-f858-4378-8e70-e4d604e9ee73 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.249593] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1122.249593] env[62109]: value = "task-1117259" [ 1122.249593] env[62109]: _type = "Task" [ 1122.249593] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.257145] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117259, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.374247] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: c3b486c6-5aa0-4d4b-9f6d-29ec9f2e41e4] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1122.518239] env[62109]: DEBUG nova.network.neutron [req-ed7feaf2-4e33-42c2-be59-9cfbbdaeb0d9 req-b8b947ea-ca14-4ff1-98ea-bb80f1dbe841 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updated VIF entry in instance network info cache for port c190fa8c-8d46-43de-943d-554e47a2fe5c. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1122.518744] env[62109]: DEBUG nova.network.neutron [req-ed7feaf2-4e33-42c2-be59-9cfbbdaeb0d9 req-b8b947ea-ca14-4ff1-98ea-bb80f1dbe841 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updating instance_info_cache with network_info: [{"id": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "address": "fa:16:3e:2e:75:e5", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc190fa8c-8d", "ovs_interfaceid": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.740421] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5241e364-6416-4a17-798d-fdbc3d946f75, 'name': SearchDatastore_Task, 'duration_secs': 0.011673} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.740853] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1122.741099] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1122.741344] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1122.742276] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.742276] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1122.742610] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78d2ed96-3f31-45b3-9adb-f6539121a733 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.755359] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1122.755630] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1122.756679] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c39659c3-7cb4-45c5-82fd-d330ae383e53 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.764040] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117259, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.767189] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1122.767189] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52662e63-e7a1-4d55-1733-5f204aeb123d" [ 1122.767189] env[62109]: _type = "Task" [ 1122.767189] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.775863] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52662e63-e7a1-4d55-1733-5f204aeb123d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.877337] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: a24f2349-7c1b-441d-a36e-b16dd61f6031] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1123.021217] env[62109]: DEBUG oslo_concurrency.lockutils [req-ed7feaf2-4e33-42c2-be59-9cfbbdaeb0d9 req-b8b947ea-ca14-4ff1-98ea-bb80f1dbe841 service nova] Releasing lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1123.261737] env[62109]: DEBUG oslo_vmware.api [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117259, 'name': PowerOnVM_Task, 'duration_secs': 0.926991} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.262096] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1123.262313] env[62109]: INFO nova.compute.manager [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Took 6.90 seconds to spawn the instance on the hypervisor. 
[ 1123.263143] env[62109]: DEBUG nova.compute.manager [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1123.264160] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8983bfeb-340e-4a5b-8dcc-34aaaae30f4e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.282655] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52662e63-e7a1-4d55-1733-5f204aeb123d, 'name': SearchDatastore_Task, 'duration_secs': 0.011115} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.283690] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4dcf19b-6451-4387-8248-4f8c60310109 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.290153] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1123.290153] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d8418c-97f9-75d1-7593-061ccd64b6f7" [ 1123.290153] env[62109]: _type = "Task" [ 1123.290153] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.299291] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d8418c-97f9-75d1-7593-061ccd64b6f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.381360] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 32cccd30-278c-48b6-8855-5cd76c2da057] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1123.438467] env[62109]: DEBUG nova.network.neutron [-] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.793689] env[62109]: INFO nova.compute.manager [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Took 14.97 seconds to build instance. [ 1123.805087] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d8418c-97f9-75d1-7593-061ccd64b6f7, 'name': SearchDatastore_Task, 'duration_secs': 0.010445} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.805802] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1123.806635] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] c7a95d76-b143-45ce-87b3-de0b63e53169/c7a95d76-b143-45ce-87b3-de0b63e53169.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1123.807068] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30e981f2-f446-4a44-bd06-2a4f3ffb54f5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.815561] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1123.815561] env[62109]: value = "task-1117260" [ 1123.815561] env[62109]: _type = "Task" [ 1123.815561] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.825345] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117260, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.843733] env[62109]: DEBUG nova.compute.manager [req-175c0da3-de90-46f0-b4f8-78aa2cba1d2e req-2b89da25-f1fb-44a1-b126-10899f2fb1cf service nova] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Received event network-vif-deleted-14e9f4a1-8980-4de2-88f7-dd0162687351 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1123.885110] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 8b63f9a1-5639-48b2-b0a9-30380835bef2] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1123.942533] env[62109]: INFO nova.compute.manager [-] [instance: 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af] Took 1.88 seconds to deallocate network for instance. 
[ 1124.298946] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1cb5e64d-008e-4d6f-a6f6-99a3568f8b1d tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.477s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.326448] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117260, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.389460] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 7f40cdc8-3421-47b7-b148-ff6417105dbb] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1124.449278] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.449899] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.449980] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.469615] env[62109]: INFO nova.scheduler.client.report [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleted allocations for instance 93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af [ 1124.831188] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117260, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631423} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.831188] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] c7a95d76-b143-45ce-87b3-de0b63e53169/c7a95d76-b143-45ce-87b3-de0b63e53169.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1124.831188] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1124.831188] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a24d85cf-f564-4efb-9dec-911d5de77f06 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.838394] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1124.838394] env[62109]: value = "task-1117261" [ 1124.838394] env[62109]: _type = "Task" [ 1124.838394] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.847570] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117261, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.893050] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 8b6ec904-8c68-4eaa-94fe-47a87528e26b] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1124.984781] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd0b97ed-5a7e-474e-a2fa-d7295743c302 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "93b2c3f9-7ae7-49bd-9f7a-fe59611bf9af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.048s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.348447] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117261, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10619} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.348597] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1125.349351] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e28e54-6cba-4e4d-8124-4b1883abeb63 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.372643] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] c7a95d76-b143-45ce-87b3-de0b63e53169/c7a95d76-b143-45ce-87b3-de0b63e53169.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1125.372940] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0613c6d2-8f4a-4cb2-816f-ca77c8c128ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.398766] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 028300fd-f9f8-4606-a39e-53582f830eeb] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1125.401699] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1125.401699] env[62109]: value = "task-1117262" [ 1125.401699] env[62109]: _type = "Task" [ 1125.401699] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.411140] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117262, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.564252] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "06af64af-5448-49f9-9d5a-12dad1bdfe29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.564252] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "06af64af-5448-49f9-9d5a-12dad1bdfe29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.903087] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: a9fb75d5-e303-4f31-888d-528963ab23b7] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1125.921344] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117262, 'name': ReconfigVM_Task, 'duration_secs': 0.296345} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.921704] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Reconfigured VM instance instance-0000006a to attach disk [datastore2] c7a95d76-b143-45ce-87b3-de0b63e53169/c7a95d76-b143-45ce-87b3-de0b63e53169.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1125.922419] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8b78a105-cc3c-4926-8073-b9cf291c14d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.930428] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1125.930428] env[62109]: value = "task-1117263" [ 1125.930428] env[62109]: _type = "Task" [ 1125.930428] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.940117] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117263, 'name': Rename_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.072033] env[62109]: DEBUG nova.compute.manager [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1126.243358] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "fdc4486a-4837-4006-87c8-166cd5c41fcd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.243600] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "fdc4486a-4837-4006-87c8-166cd5c41fcd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.409403] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 55381bef-dab5-44cd-97fe-9fc75ab61d0e] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1126.441900] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117263, 'name': Rename_Task, 'duration_secs': 0.159494} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.442899] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1126.443193] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a32e875-62d0-4976-8890-c9c1c4dd47a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.450207] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1126.450207] env[62109]: value = "task-1117264" [ 1126.450207] env[62109]: _type = "Task" [ 1126.450207] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.458865] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117264, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.599486] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.599770] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.601382] env[62109]: INFO nova.compute.claims [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.746235] env[62109]: DEBUG nova.compute.manager [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1126.913916] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 1399f618-3a93-4731-a59b-f98306d6cd52] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1126.961077] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117264, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.266192] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.417204] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: c753a2db-d701-4508-88bd-4ebe4f32a075] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1127.461822] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117264, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.703636] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d56014-0edc-42f6-b937-673041ec4459 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.712268] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4206d7-2b5f-4fb6-a70f-8a132e549988 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.744283] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73eb8ef-098a-45af-8ad2-bb3046fbaecd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.751983] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03f3a99-30a4-4d82-bb3a-fe610a5b76ef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.766531] env[62109]: DEBUG nova.compute.provider_tree [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.923112] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1127.923112] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Cleaning up deleted instances with incomplete migration {{(pid=62109) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1127.963146] env[62109]: DEBUG oslo_vmware.api [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117264, 'name': PowerOnVM_Task, 'duration_secs': 1.078855} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.963507] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1127.964213] env[62109]: INFO nova.compute.manager [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Took 8.62 seconds to spawn the instance on the hypervisor. 
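The three vCenter tasks traced above (ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all follow the same poll-until-done shape: the API layer logs "Waiting for the task", emits periodic progress lines (6%, then 0% -> 66% -> 94%), and finally reports "completed successfully" with a duration_secs. Below is a minimal Python sketch of that polling shape only; TaskInfo and get_task_info are hypothetical stand-ins and this is not the oslo.vmware implementation.

    import time
    from dataclasses import dataclass
    from typing import Callable, Optional

    @dataclass
    class TaskInfo:                      # hypothetical stand-in for a vim TaskInfo
        state: str                       # "running", "success" or "error"
        progress: int = 0
        error: Optional[str] = None

    def wait_for_task(get_task_info: Callable[[str], TaskInfo],
                      task_id: str, poll_interval: float = 0.5) -> TaskInfo:
        """Poll a task until it reaches a terminal state, logging progress."""
        while True:
            info = get_task_info(task_id)        # one poll per interval
            if info.state == "success":
                print(f"Task {task_id} completed successfully.")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            print(f"Task {task_id} progress is {info.progress}%.")
            time.sleep(poll_interval)

In the entries above, PowerOnVM_Task is exactly this loop seen from the outside: three progress polls, then completion with duration_secs 1.078855.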
[ 1127.964547] env[62109]: DEBUG nova.compute.manager [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1127.965683] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7e0a65-360c-4ed2-9345-0a47e5b63ac0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.269904] env[62109]: DEBUG nova.scheduler.client.report [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1128.411697] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Acquiring lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.412096] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.426018] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.487020] env[62109]: INFO nova.compute.manager [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Took 13.36 seconds to build instance. [ 1128.759705] env[62109]: DEBUG nova.compute.manager [req-226951d2-59fc-485a-a92b-1d46dabc0a88 req-476daba3-d7a2-4a41-8b48-13fd4683b451 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Received event network-changed-c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1128.759948] env[62109]: DEBUG nova.compute.manager [req-226951d2-59fc-485a-a92b-1d46dabc0a88 req-476daba3-d7a2-4a41-8b48-13fd4683b451 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Refreshing instance network info cache due to event network-changed-c190fa8c-8d46-43de-943d-554e47a2fe5c. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1128.760208] env[62109]: DEBUG oslo_concurrency.lockutils [req-226951d2-59fc-485a-a92b-1d46dabc0a88 req-476daba3-d7a2-4a41-8b48-13fd4683b451 service nova] Acquiring lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.760407] env[62109]: DEBUG oslo_concurrency.lockutils [req-226951d2-59fc-485a-a92b-1d46dabc0a88 req-476daba3-d7a2-4a41-8b48-13fd4683b451 service nova] Acquired lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.760639] env[62109]: DEBUG nova.network.neutron [req-226951d2-59fc-485a-a92b-1d46dabc0a88 req-476daba3-d7a2-4a41-8b48-13fd4683b451 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Refreshing network info cache for port c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1128.778547] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.179s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.779065] env[62109]: DEBUG nova.compute.manager [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1128.781919] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.516s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.783409] env[62109]: INFO nova.compute.claims [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1128.914899] env[62109]: DEBUG nova.compute.manager [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1128.988092] env[62109]: DEBUG oslo_concurrency.lockutils [None req-099ed5b2-dfdd-44d1-a792-53f532fa397c tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.871s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.288204] env[62109]: DEBUG nova.compute.utils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1129.291964] env[62109]: DEBUG nova.compute.manager [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1129.292229] env[62109]: DEBUG nova.network.neutron [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1129.334882] env[62109]: DEBUG nova.policy [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73cb2c252e3f4e68a2767b349e0917e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df642ff4b7f247d09f80b260ed9ef53f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1129.417366] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1129.417366] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1129.417366] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1129.444867] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.504198] 
env[62109]: DEBUG nova.network.neutron [req-226951d2-59fc-485a-a92b-1d46dabc0a88 req-476daba3-d7a2-4a41-8b48-13fd4683b451 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updated VIF entry in instance network info cache for port c190fa8c-8d46-43de-943d-554e47a2fe5c. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1129.504559] env[62109]: DEBUG nova.network.neutron [req-226951d2-59fc-485a-a92b-1d46dabc0a88 req-476daba3-d7a2-4a41-8b48-13fd4683b451 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updating instance_info_cache with network_info: [{"id": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "address": "fa:16:3e:2e:75:e5", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc190fa8c-8d", "ovs_interfaceid": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.597210] env[62109]: DEBUG nova.network.neutron [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Successfully created port: 17f64e3a-f17f-4264-a8a8-8fc27a55283b {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1129.795932] env[62109]: DEBUG nova.compute.manager [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1129.914486] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ffc57c-1651-44ae-aa46-2a37bef6da60 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.929255] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d148c7ff-7e60-425b-910f-18778776736b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.961621] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d58780b-e85f-49f5-986b-26472f6b5741 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.965389] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "refresh_cache-dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.965529] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquired lock "refresh_cache-dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.965675] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Forcefully refreshing network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1129.970459] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d704ab1-ba1c-4d3d-a313-60e18d16bbbc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.984645] env[62109]: DEBUG nova.compute.provider_tree [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1130.006925] env[62109]: DEBUG oslo_concurrency.lockutils [req-226951d2-59fc-485a-a92b-1d46dabc0a88 req-476daba3-d7a2-4a41-8b48-13fd4683b451 service nova] Releasing lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1130.486909] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1130.490279] env[62109]: DEBUG nova.scheduler.client.report [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1130.808589] env[62109]: DEBUG nova.compute.manager [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1130.835598] env[62109]: DEBUG nova.virt.hardware [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1130.835861] env[62109]: DEBUG nova.virt.hardware [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1130.836035] env[62109]: DEBUG nova.virt.hardware [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1130.836230] env[62109]: DEBUG nova.virt.hardware [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1130.836384] env[62109]: DEBUG nova.virt.hardware [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1130.836537] env[62109]: DEBUG nova.virt.hardware [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f 
tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1130.836750] env[62109]: DEBUG nova.virt.hardware [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1130.836915] env[62109]: DEBUG nova.virt.hardware [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1130.837104] env[62109]: DEBUG nova.virt.hardware [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1130.837279] env[62109]: DEBUG nova.virt.hardware [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1130.837459] env[62109]: DEBUG nova.virt.hardware [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1130.838340] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf33f1c-7b22-4fce-ac8f-e8ab420e0560 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.846534] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4969ce88-aae6-46e7-82a0-c9f875625488 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.995286] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.213s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.995888] env[62109]: DEBUG nova.compute.manager [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1131.003980] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.557s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.003980] env[62109]: INFO nova.compute.claims [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1131.011417] env[62109]: DEBUG nova.compute.manager [req-9265349a-4d5c-4bc0-816b-7ead8bba015c req-dc6e7b32-ddd1-4b02-8568-91c543018354 service nova] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Received event network-vif-plugged-17f64e3a-f17f-4264-a8a8-8fc27a55283b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1131.011679] env[62109]: DEBUG oslo_concurrency.lockutils [req-9265349a-4d5c-4bc0-816b-7ead8bba015c req-dc6e7b32-ddd1-4b02-8568-91c543018354 service nova] Acquiring lock "06af64af-5448-49f9-9d5a-12dad1bdfe29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.012289] env[62109]: DEBUG oslo_concurrency.lockutils [req-9265349a-4d5c-4bc0-816b-7ead8bba015c req-dc6e7b32-ddd1-4b02-8568-91c543018354 service nova] Lock "06af64af-5448-49f9-9d5a-12dad1bdfe29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.012618] env[62109]: DEBUG oslo_concurrency.lockutils [req-9265349a-4d5c-4bc0-816b-7ead8bba015c req-dc6e7b32-ddd1-4b02-8568-91c543018354 service nova] Lock "06af64af-5448-49f9-9d5a-12dad1bdfe29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.012868] env[62109]: DEBUG nova.compute.manager [req-9265349a-4d5c-4bc0-816b-7ead8bba015c req-dc6e7b32-ddd1-4b02-8568-91c543018354 service nova] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] No waiting events found dispatching network-vif-plugged-17f64e3a-f17f-4264-a8a8-8fc27a55283b {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1131.013146] env[62109]: WARNING nova.compute.manager [req-9265349a-4d5c-4bc0-816b-7ead8bba015c req-dc6e7b32-ddd1-4b02-8568-91c543018354 service nova] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Received unexpected event network-vif-plugged-17f64e3a-f17f-4264-a8a8-8fc27a55283b for instance with vm_state building and task_state spawning. 
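The lockutils entries surrounding this point always come in the same triple: "Acquiring lock X by Y", "Lock X acquired by Y :: waited Ns", and later "Lock X \"released\" by Y :: held Ns" (for example, compute_resources held for 2.179s during the instance claim, and the per-instance events lock held for 0.000s). A small sketch of that acquire/wait/hold timing pattern, using a plain threading.Lock rather than the real oslo_concurrency.lockutils code; timed_lock and _locks are illustrative names.

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}                          # lock name -> threading.Lock

    @contextmanager
    def timed_lock(name, caller):
        """Acquire a named lock, reporting wait time on acquire and hold time on release."""
        lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{caller}"')
        start = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired by "{caller}" :: waited {time.monotonic() - start:.3f}s')
        held_from = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - held_from:.3f}s')

    # e.g. with timed_lock("compute_resources", "ResourceTracker.instance_claim"): ...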
[ 1131.082792] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.503053] env[62109]: DEBUG nova.compute.utils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1131.504611] env[62109]: DEBUG nova.compute.manager [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1131.504808] env[62109]: DEBUG nova.network.neutron [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1131.540598] env[62109]: DEBUG nova.network.neutron [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Successfully updated port: 17f64e3a-f17f-4264-a8a8-8fc27a55283b {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1131.549540] env[62109]: DEBUG nova.policy [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b39ff10ac8bd4e4abf04fd881e5125ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9e5867b8b7e4ed18c5395baf46db66f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1131.565133] env[62109]: DEBUG nova.compute.manager [req-651a32cc-9cd5-4738-b99e-dce35603dcd7 req-a9b086c5-0fb8-4285-956f-bc219ad295f9 service nova] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Received event network-changed-17f64e3a-f17f-4264-a8a8-8fc27a55283b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1131.565133] env[62109]: DEBUG nova.compute.manager [req-651a32cc-9cd5-4738-b99e-dce35603dcd7 req-a9b086c5-0fb8-4285-956f-bc219ad295f9 service nova] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Refreshing instance network info cache due to event network-changed-17f64e3a-f17f-4264-a8a8-8fc27a55283b. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1131.565133] env[62109]: DEBUG oslo_concurrency.lockutils [req-651a32cc-9cd5-4738-b99e-dce35603dcd7 req-a9b086c5-0fb8-4285-956f-bc219ad295f9 service nova] Acquiring lock "refresh_cache-06af64af-5448-49f9-9d5a-12dad1bdfe29" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1131.565497] env[62109]: DEBUG oslo_concurrency.lockutils [req-651a32cc-9cd5-4738-b99e-dce35603dcd7 req-a9b086c5-0fb8-4285-956f-bc219ad295f9 service nova] Acquired lock "refresh_cache-06af64af-5448-49f9-9d5a-12dad1bdfe29" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.565497] env[62109]: DEBUG nova.network.neutron [req-651a32cc-9cd5-4738-b99e-dce35603dcd7 req-a9b086c5-0fb8-4285-956f-bc219ad295f9 service nova] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Refreshing network info cache for port 17f64e3a-f17f-4264-a8a8-8fc27a55283b {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1131.586090] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Releasing lock "refresh_cache-dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1131.586090] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Updated the network info_cache for instance {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1131.586090] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.586333] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.586333] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.839231] env[62109]: DEBUG nova.network.neutron [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Successfully created port: 8a4912e9-48e5-4762-aad9-050359873623 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1132.008318] env[62109]: DEBUG nova.compute.manager [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1132.046662] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "refresh_cache-06af64af-5448-49f9-9d5a-12dad1bdfe29" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1132.091981] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.100133] env[62109]: DEBUG nova.network.neutron [req-651a32cc-9cd5-4738-b99e-dce35603dcd7 req-a9b086c5-0fb8-4285-956f-bc219ad295f9 service nova] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1132.190404] env[62109]: DEBUG nova.network.neutron [req-651a32cc-9cd5-4738-b99e-dce35603dcd7 req-a9b086c5-0fb8-4285-956f-bc219ad295f9 service nova] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.223512] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74569f66-22cf-41af-bb5d-8ddb80d137fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.231534] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aaf7c7c-7e5f-4615-82b5-3d0569758d3c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.261658] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210b37a4-2998-463a-9d9f-50a330d60d87 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.269742] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45faf34c-bd8a-4f13-9759-aa1213aaaa12 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.284323] env[62109]: DEBUG nova.compute.provider_tree [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1132.692655] env[62109]: DEBUG oslo_concurrency.lockutils [req-651a32cc-9cd5-4738-b99e-dce35603dcd7 req-a9b086c5-0fb8-4285-956f-bc219ad295f9 service nova] Releasing lock "refresh_cache-06af64af-5448-49f9-9d5a-12dad1bdfe29" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.693062] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "refresh_cache-06af64af-5448-49f9-9d5a-12dad1bdfe29" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.693229] env[62109]: DEBUG nova.network.neutron [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1132.787818] env[62109]: DEBUG nova.scheduler.client.report [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1133.019877] env[62109]: DEBUG nova.compute.manager [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1133.040948] env[62109]: DEBUG nova.virt.hardware [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1133.041221] env[62109]: DEBUG nova.virt.hardware [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1133.041383] env[62109]: DEBUG nova.virt.hardware [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1133.041572] env[62109]: DEBUG nova.virt.hardware [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1133.041726] env[62109]: DEBUG nova.virt.hardware [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1133.041878] env[62109]: DEBUG nova.virt.hardware [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1133.042098] env[62109]: DEBUG nova.virt.hardware [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1133.042265] env[62109]: DEBUG nova.virt.hardware [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1133.042435] env[62109]: DEBUG nova.virt.hardware [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1133.042602] env[62109]: DEBUG nova.virt.hardware [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1133.042779] env[62109]: DEBUG nova.virt.hardware [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1133.043686] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76649ece-8668-4adf-924b-de3de13473ed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.052263] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ad01ec-8033-49f2-a8a3-2d57a49b3037 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.072040] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235415d-a07d-3858-606a-f3e3c5363cb3/disk-0.vmdk. 
{{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1133.072808] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84779e78-6dc4-43cd-bd67-b77aac6e6d20 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.078169] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235415d-a07d-3858-606a-f3e3c5363cb3/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1133.078340] env[62109]: ERROR oslo_vmware.rw_handles [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235415d-a07d-3858-606a-f3e3c5363cb3/disk-0.vmdk due to incomplete transfer. [ 1133.078545] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d7615f6e-7825-465d-ab40-c91ba0c2c1d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.085065] env[62109]: DEBUG oslo_vmware.rw_handles [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235415d-a07d-3858-606a-f3e3c5363cb3/disk-0.vmdk. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1133.085263] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Uploaded image dcff6ca5-4589-4dff-8ead-c334a5a65018 to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1133.087581] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1133.087807] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0b7a4946-c62a-4c21-9047-8dcc725e0f76 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.092758] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1133.092758] env[62109]: value = "task-1117265" [ 1133.092758] env[62109]: _type = "Task" [ 1133.092758] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.100371] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117265, 'name': Destroy_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.224409] env[62109]: DEBUG nova.network.neutron [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1133.229612] env[62109]: DEBUG nova.compute.manager [req-cbf392f8-fbd2-4a74-a5fd-792d360cf6e3 req-91b74d18-8ad0-4975-8082-1de781f8641d service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Received event network-vif-plugged-8a4912e9-48e5-4762-aad9-050359873623 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1133.229612] env[62109]: DEBUG oslo_concurrency.lockutils [req-cbf392f8-fbd2-4a74-a5fd-792d360cf6e3 req-91b74d18-8ad0-4975-8082-1de781f8641d service nova] Acquiring lock "fdc4486a-4837-4006-87c8-166cd5c41fcd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.229612] env[62109]: DEBUG oslo_concurrency.lockutils [req-cbf392f8-fbd2-4a74-a5fd-792d360cf6e3 req-91b74d18-8ad0-4975-8082-1de781f8641d service nova] Lock "fdc4486a-4837-4006-87c8-166cd5c41fcd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.229612] env[62109]: DEBUG oslo_concurrency.lockutils [req-cbf392f8-fbd2-4a74-a5fd-792d360cf6e3 req-91b74d18-8ad0-4975-8082-1de781f8641d service nova] Lock "fdc4486a-4837-4006-87c8-166cd5c41fcd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.229612] env[62109]: DEBUG nova.compute.manager [req-cbf392f8-fbd2-4a74-a5fd-792d360cf6e3 req-91b74d18-8ad0-4975-8082-1de781f8641d service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] No waiting events found dispatching network-vif-plugged-8a4912e9-48e5-4762-aad9-050359873623 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1133.229784] env[62109]: WARNING nova.compute.manager [req-cbf392f8-fbd2-4a74-a5fd-792d360cf6e3 req-91b74d18-8ad0-4975-8082-1de781f8641d service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Received unexpected event network-vif-plugged-8a4912e9-48e5-4762-aad9-050359873623 for instance with vm_state building and task_state spawning. 
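The inventory reported for provider 574e9717-c25e-453d-8028-45d9e2f95398 is identical on every report in this run: VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400, with max_unit capping a single allocation at 16 vCPUs / 65530 MB / 170 GB. A short worked example of what those numbers imply for schedulable capacity, assuming the usual placement formula capacity = (total - reserved) * allocation_ratio; the values are copied from the inventory data above.

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 170},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: capacity={capacity:g}, per-allocation cap={inv['max_unit']}")
    # VCPU: capacity=192, per-allocation cap=16
    # MEMORY_MB: capacity=196078, per-allocation cap=65530
    # DISK_GB: capacity=400, per-allocation cap=170

Under that reading, the repeated "Inventory has not changed" lines simply mean the tracker keeps recomputing the same totals while claims (such as the 06af64af and fdc4486a instance claims above) consume allocations against them.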
[ 1133.293381] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.291s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.294415] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.203s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.294649] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.294727] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1133.296324] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628a1e9c-c46c-446e-88b6-dbb3a9ebce9e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.305154] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac004c9-a514-4d63-b498-3ebd5cf2db61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.319447] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cdeaabc-5580-4568-99ec-c3f56ccf422e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.326235] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf67607-c54d-4928-8789-d6bba8143992 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.360393] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180418MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1133.360574] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.360763] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.363517] env[62109]: DEBUG nova.network.neutron [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Updating instance_info_cache with network_info: [{"id": "17f64e3a-f17f-4264-a8a8-8fc27a55283b", "address": "fa:16:3e:1e:48:79", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17f64e3a-f1", "ovs_interfaceid": "17f64e3a-f17f-4264-a8a8-8fc27a55283b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.378462] env[62109]: DEBUG nova.network.neutron [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Successfully updated port: 8a4912e9-48e5-4762-aad9-050359873623 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1133.603108] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117265, 'name': Destroy_Task, 'duration_secs': 0.330649} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.603386] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Destroyed the VM [ 1133.603624] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1133.603873] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3abe978a-4d5b-4fc9-a9b7-db50c4d05a47 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.609449] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1133.609449] env[62109]: value = "task-1117266" [ 1133.609449] env[62109]: _type = "Task" [ 1133.609449] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.616779] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117266, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.797403] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Acquiring lock "3bae3055-64d4-4c41-9628-a9512a8d0a4e" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.797728] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Lock "3bae3055-64d4-4c41-9628-a9512a8d0a4e" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.867235] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "refresh_cache-06af64af-5448-49f9-9d5a-12dad1bdfe29" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.867526] env[62109]: DEBUG nova.compute.manager [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Instance network_info: |[{"id": "17f64e3a-f17f-4264-a8a8-8fc27a55283b", "address": "fa:16:3e:1e:48:79", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17f64e3a-f1", "ovs_interfaceid": "17f64e3a-f17f-4264-a8a8-8fc27a55283b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1133.868133] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:48:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbe1725d-6711-4e92-9a4e-d4802651e7d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '17f64e3a-f17f-4264-a8a8-8fc27a55283b', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 
1133.875951] env[62109]: DEBUG oslo.service.loopingcall [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1133.876694] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1133.877334] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5aae5f47-5df9-411d-9b8b-e91a93340e0b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.893113] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.893254] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.893396] env[62109]: DEBUG nova.network.neutron [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1133.900850] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1133.900850] env[62109]: value = "task-1117267" [ 1133.900850] env[62109]: _type = "Task" [ 1133.900850] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.909219] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117267, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.119354] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117266, 'name': RemoveSnapshot_Task, 'duration_secs': 0.417759} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.119632] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1134.119905] env[62109]: DEBUG nova.compute.manager [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1134.120731] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6973aa-0b4e-4472-82fc-c413dcef924c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.300206] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Lock "3bae3055-64d4-4c41-9628-a9512a8d0a4e" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.502s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.300846] env[62109]: DEBUG nova.compute.manager [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1134.389967] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.389967] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.389967] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 8a64a700-e381-49a0-89ae-8a678ed7a4fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.389967] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance c7a95d76-b143-45ce-87b3-de0b63e53169 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.390199] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 06af64af-5448-49f9-9d5a-12dad1bdfe29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.390236] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance fdc4486a-4837-4006-87c8-166cd5c41fcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.391025] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.391025] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1134.391025] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1134.413620] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117267, 'name': CreateVM_Task, 'duration_secs': 0.320699} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.413787] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1134.414467] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.414651] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.414964] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1134.415243] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6de7fb5-9ead-488d-b81b-cc95f0ad5d84 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.422215] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1134.422215] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52eacf95-56e2-46c7-2141-38e88f5253f3" [ 1134.422215] env[62109]: _type = "Task" [ 1134.422215] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.429688] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52eacf95-56e2-46c7-2141-38e88f5253f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.430468] env[62109]: DEBUG nova.network.neutron [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1134.482033] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a25e48-5e7b-4865-8af2-d4e78ed8f126 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.489800] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd6aa9d-0149-4741-8b08-ea2889a2d4c5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.518901] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5a86bc-70bb-4061-baa6-1c7154d80ffa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.526921] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e347c58f-041b-46a6-ac87-cf3f1338c8c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.539542] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.591695] env[62109]: DEBUG nova.network.neutron [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance_info_cache with network_info: [{"id": "8a4912e9-48e5-4762-aad9-050359873623", "address": "fa:16:3e:1b:58:93", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a4912e9-48", "ovs_interfaceid": "8a4912e9-48e5-4762-aad9-050359873623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.632787] env[62109]: INFO nova.compute.manager [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Shelve offloading [ 1134.634624] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 
1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1134.634959] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-904ecf44-d205-454c-b761-d86579bcac7f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.642165] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1134.642165] env[62109]: value = "task-1117268" [ 1134.642165] env[62109]: _type = "Task" [ 1134.642165] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.805518] env[62109]: DEBUG nova.compute.utils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1134.806972] env[62109]: DEBUG nova.compute.manager [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1134.807150] env[62109]: DEBUG nova.network.neutron [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1134.858468] env[62109]: DEBUG nova.policy [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a2640e778844d38adee470877490ef8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f840dc383ca549d1940f745e267702cc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1134.933264] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52eacf95-56e2-46c7-2141-38e88f5253f3, 'name': SearchDatastore_Task, 'duration_secs': 0.040659} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.933613] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1134.933852] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1134.934112] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.934331] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.934532] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1134.934824] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df589d88-a28e-401e-89c3-f6be9946987a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.943742] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1134.943963] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1134.944717] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8cb9eae-0067-4f7d-bd4f-4694ce84fe42 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.949652] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1134.949652] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52625ffb-17ee-51ce-3600-34d02cdf68d4" [ 1134.949652] env[62109]: _type = "Task" [ 1134.949652] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.956810] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52625ffb-17ee-51ce-3600-34d02cdf68d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.042228] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1135.093813] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.094169] env[62109]: DEBUG nova.compute.manager [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Instance network_info: |[{"id": "8a4912e9-48e5-4762-aad9-050359873623", "address": "fa:16:3e:1b:58:93", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap8a4912e9-48", "ovs_interfaceid": "8a4912e9-48e5-4762-aad9-050359873623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1135.095079] env[62109]: DEBUG nova.network.neutron [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Successfully created port: 7acb7bd6-0ed9-4960-8302-e1956fde3555 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1135.097295] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:58:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61b8f0db-488e-42d7-bf6c-6c1665cd5616', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a4912e9-48e5-4762-aad9-050359873623', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1135.104988] env[62109]: DEBUG oslo.service.loopingcall [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1135.105241] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1135.105840] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13f408c5-987d-4bb4-aafc-77b2aa7ab7da {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.132029] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1135.132029] env[62109]: value = "task-1117269" [ 1135.132029] env[62109]: _type = "Task" [ 1135.132029] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.138114] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117269, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.153018] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1135.153018] env[62109]: DEBUG nova.compute.manager [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1135.153018] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5bd4f5-d331-42d6-89df-ef5e8b591be1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.157769] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.158125] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.159424] env[62109]: DEBUG nova.network.neutron [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1135.255168] env[62109]: DEBUG nova.compute.manager [req-5eecfda5-9d83-41d1-9573-d534785eb42d req-cefb42bf-cf5a-4721-9148-e6c80781fbf1 service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Received event network-changed-8a4912e9-48e5-4762-aad9-050359873623 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1135.255335] env[62109]: DEBUG nova.compute.manager [req-5eecfda5-9d83-41d1-9573-d534785eb42d req-cefb42bf-cf5a-4721-9148-e6c80781fbf1 service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Refreshing instance network info cache due to event network-changed-8a4912e9-48e5-4762-aad9-050359873623. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1135.255619] env[62109]: DEBUG oslo_concurrency.lockutils [req-5eecfda5-9d83-41d1-9573-d534785eb42d req-cefb42bf-cf5a-4721-9148-e6c80781fbf1 service nova] Acquiring lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.255789] env[62109]: DEBUG oslo_concurrency.lockutils [req-5eecfda5-9d83-41d1-9573-d534785eb42d req-cefb42bf-cf5a-4721-9148-e6c80781fbf1 service nova] Acquired lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.255997] env[62109]: DEBUG nova.network.neutron [req-5eecfda5-9d83-41d1-9573-d534785eb42d req-cefb42bf-cf5a-4721-9148-e6c80781fbf1 service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Refreshing network info cache for port 8a4912e9-48e5-4762-aad9-050359873623 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1135.310097] env[62109]: DEBUG nova.compute.manager [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1135.459536] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52625ffb-17ee-51ce-3600-34d02cdf68d4, 'name': SearchDatastore_Task, 'duration_secs': 0.034859} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.460331] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2b90ba7-402c-4898-a229-7ee33be539ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.465143] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1135.465143] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526bc901-9ad1-5296-3e63-a95a85a46e13" [ 1135.465143] env[62109]: _type = "Task" [ 1135.465143] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.472178] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526bc901-9ad1-5296-3e63-a95a85a46e13, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.546966] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1135.547263] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.186s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.639764] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117269, 'name': CreateVM_Task, 'duration_secs': 0.30438} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.639941] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1135.640651] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.640900] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.641183] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1135.641438] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7a645a9-a92c-4a04-a645-f4fdb95dcc4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.646930] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1135.646930] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52edf50f-eb3a-1ea8-1dd3-d457c3d2f411" [ 1135.646930] env[62109]: _type = "Task" [ 1135.646930] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.655366] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52edf50f-eb3a-1ea8-1dd3-d457c3d2f411, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.975529] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]526bc901-9ad1-5296-3e63-a95a85a46e13, 'name': SearchDatastore_Task, 'duration_secs': 0.008392} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.978663] env[62109]: DEBUG nova.network.neutron [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updating instance_info_cache with network_info: [{"id": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "address": "fa:16:3e:53:c9:6c", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa04abfce-a9", "ovs_interfaceid": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.979851] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.980108] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 06af64af-5448-49f9-9d5a-12dad1bdfe29/06af64af-5448-49f9-9d5a-12dad1bdfe29.vmdk {{(pid=62109) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1135.980374] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-195ac5c6-78c6-435e-8161-942a4772b835 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.988104] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1135.988104] env[62109]: value = "task-1117270" [ 1135.988104] env[62109]: _type = "Task" [ 1135.988104] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.997430] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117270, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.047458] env[62109]: DEBUG nova.network.neutron [req-5eecfda5-9d83-41d1-9573-d534785eb42d req-cefb42bf-cf5a-4721-9148-e6c80781fbf1 service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updated VIF entry in instance network info cache for port 8a4912e9-48e5-4762-aad9-050359873623. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1136.047837] env[62109]: DEBUG nova.network.neutron [req-5eecfda5-9d83-41d1-9573-d534785eb42d req-cefb42bf-cf5a-4721-9148-e6c80781fbf1 service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance_info_cache with network_info: [{"id": "8a4912e9-48e5-4762-aad9-050359873623", "address": "fa:16:3e:1b:58:93", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a4912e9-48", "ovs_interfaceid": "8a4912e9-48e5-4762-aad9-050359873623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.158792] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52edf50f-eb3a-1ea8-1dd3-d457c3d2f411, 'name': SearchDatastore_Task, 'duration_secs': 0.009347} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.159235] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.159544] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1136.159815] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1136.159970] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.160196] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.160582] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1bc1cbfc-6500-4434-af52-53aadd42f5b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.174103] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.174348] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1136.175108] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-215eee61-e156-4796-b8cc-f9efb1ee1653 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.181246] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1136.181246] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5215d169-21b7-598e-3945-4b26039e9364" [ 1136.181246] env[62109]: _type = "Task" [ 1136.181246] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.189810] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5215d169-21b7-598e-3945-4b26039e9364, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.320291] env[62109]: DEBUG nova.compute.manager [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1136.349626] env[62109]: DEBUG nova.virt.hardware [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1136.349889] env[62109]: DEBUG nova.virt.hardware [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1136.350069] env[62109]: DEBUG nova.virt.hardware [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1136.350271] env[62109]: DEBUG nova.virt.hardware [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 
tempest-ServerGroupTestJSON-1496661847-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1136.350426] env[62109]: DEBUG nova.virt.hardware [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1136.350619] env[62109]: DEBUG nova.virt.hardware [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1136.350860] env[62109]: DEBUG nova.virt.hardware [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1136.351044] env[62109]: DEBUG nova.virt.hardware [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1136.351226] env[62109]: DEBUG nova.virt.hardware [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1136.351412] env[62109]: DEBUG nova.virt.hardware [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1136.351688] env[62109]: DEBUG nova.virt.hardware [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1136.352612] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135ad105-7060-4214-8a5d-220d119a3104 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.361098] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0917a95-9fb2-497b-8b38-5297bafc0761 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.484766] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.498484] env[62109]: DEBUG oslo_vmware.api [None 
req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117270, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.438538} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.498731] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 06af64af-5448-49f9-9d5a-12dad1bdfe29/06af64af-5448-49f9-9d5a-12dad1bdfe29.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1136.498955] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1136.499334] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c9f88f8-b57e-46d2-8adf-0598bfaad306 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.506317] env[62109]: DEBUG nova.compute.manager [req-31ee2752-7afe-4f7a-8116-cdc19388df73 req-6f8850e4-4f34-445a-8f23-7f60627db86f service nova] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Received event network-vif-plugged-7acb7bd6-0ed9-4960-8302-e1956fde3555 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1136.506531] env[62109]: DEBUG oslo_concurrency.lockutils [req-31ee2752-7afe-4f7a-8116-cdc19388df73 req-6f8850e4-4f34-445a-8f23-7f60627db86f service nova] Acquiring lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.506741] env[62109]: DEBUG oslo_concurrency.lockutils [req-31ee2752-7afe-4f7a-8116-cdc19388df73 req-6f8850e4-4f34-445a-8f23-7f60627db86f service nova] Lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.506913] env[62109]: DEBUG oslo_concurrency.lockutils [req-31ee2752-7afe-4f7a-8116-cdc19388df73 req-6f8850e4-4f34-445a-8f23-7f60627db86f service nova] Lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.507134] env[62109]: DEBUG nova.compute.manager [req-31ee2752-7afe-4f7a-8116-cdc19388df73 req-6f8850e4-4f34-445a-8f23-7f60627db86f service nova] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] No waiting events found dispatching network-vif-plugged-7acb7bd6-0ed9-4960-8302-e1956fde3555 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1136.507274] env[62109]: WARNING nova.compute.manager [req-31ee2752-7afe-4f7a-8116-cdc19388df73 
req-6f8850e4-4f34-445a-8f23-7f60627db86f service nova] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Received unexpected event network-vif-plugged-7acb7bd6-0ed9-4960-8302-e1956fde3555 for instance with vm_state building and task_state spawning. [ 1136.509778] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1136.509778] env[62109]: value = "task-1117271" [ 1136.509778] env[62109]: _type = "Task" [ 1136.509778] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.517832] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117271, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.550279] env[62109]: DEBUG oslo_concurrency.lockutils [req-5eecfda5-9d83-41d1-9573-d534785eb42d req-cefb42bf-cf5a-4721-9148-e6c80781fbf1 service nova] Releasing lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.692497] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5215d169-21b7-598e-3945-4b26039e9364, 'name': SearchDatastore_Task, 'duration_secs': 0.050291} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.693410] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-421213d6-72bd-4b1a-b228-bb4119fe27e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.698939] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1136.698939] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52183a0a-0ebd-1f58-8454-e558505b1b14" [ 1136.698939] env[62109]: _type = "Task" [ 1136.698939] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.708242] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52183a0a-0ebd-1f58-8454-e558505b1b14, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.739987] env[62109]: DEBUG nova.compute.manager [req-cc7281e9-bb37-45c0-86c8-bf5ce78c12bd req-3c87419f-8ff2-4226-af2c-a37a9e622eeb service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Received event network-vif-unplugged-a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1136.740401] env[62109]: DEBUG oslo_concurrency.lockutils [req-cc7281e9-bb37-45c0-86c8-bf5ce78c12bd req-3c87419f-8ff2-4226-af2c-a37a9e622eeb service nova] Acquiring lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.740401] env[62109]: DEBUG oslo_concurrency.lockutils [req-cc7281e9-bb37-45c0-86c8-bf5ce78c12bd req-3c87419f-8ff2-4226-af2c-a37a9e622eeb service nova] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.740618] env[62109]: DEBUG oslo_concurrency.lockutils [req-cc7281e9-bb37-45c0-86c8-bf5ce78c12bd req-3c87419f-8ff2-4226-af2c-a37a9e622eeb service nova] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.740756] env[62109]: DEBUG nova.compute.manager [req-cc7281e9-bb37-45c0-86c8-bf5ce78c12bd req-3c87419f-8ff2-4226-af2c-a37a9e622eeb service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] No waiting events found dispatching network-vif-unplugged-a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1136.740933] env[62109]: WARNING nova.compute.manager [req-cc7281e9-bb37-45c0-86c8-bf5ce78c12bd req-3c87419f-8ff2-4226-af2c-a37a9e622eeb service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Received unexpected event network-vif-unplugged-a04abfce-a9e7-413a-94d6-d14ed8f205cb for instance with vm_state shelved and task_state shelving_offloading. [ 1137.015478] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1137.016372] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae5f427-18f5-49ed-9bfa-697de6918e86 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.021830] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117271, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071781} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.022486] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1137.023426] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb143686-355a-4880-a95c-9bd516ac5398 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.027527] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1137.028102] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3012976a-a8e8-4341-b668-fcb3aa0b21aa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.047119] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 06af64af-5448-49f9-9d5a-12dad1bdfe29/06af64af-5448-49f9-9d5a-12dad1bdfe29.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1137.047373] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd73fad9-94b6-4b98-8ae8-a77625b5c371 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.066318] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1137.066318] env[62109]: value = "task-1117273" [ 1137.066318] env[62109]: _type = "Task" [ 1137.066318] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.075267] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117273, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.093730] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1137.094151] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1137.094369] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleting the datastore file [datastore1] 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1137.094632] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59f52cdf-b201-48b3-9437-6a2633922c13 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.101355] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1137.101355] env[62109]: value = "task-1117274" [ 1137.101355] env[62109]: _type = "Task" [ 1137.101355] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.109148] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117274, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.112453] env[62109]: DEBUG nova.network.neutron [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Successfully updated port: 7acb7bd6-0ed9-4960-8302-e1956fde3555 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1137.210646] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52183a0a-0ebd-1f58-8454-e558505b1b14, 'name': SearchDatastore_Task, 'duration_secs': 0.011656} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.210944] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.211224] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] fdc4486a-4837-4006-87c8-166cd5c41fcd/fdc4486a-4837-4006-87c8-166cd5c41fcd.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1137.211482] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6972496a-8e49-487d-82a0-b4ca7a5551f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.217751] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1137.217751] env[62109]: value = "task-1117275" [ 1137.217751] env[62109]: _type = "Task" [ 1137.217751] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.224921] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117275, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.284352] env[62109]: DEBUG nova.compute.manager [req-8e812375-b7d9-421e-95e2-40e8fa98c5e5 req-6b13d4d7-6c9a-434a-ad9f-33ab617d4f77 service nova] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Received event network-changed-7acb7bd6-0ed9-4960-8302-e1956fde3555 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1137.284663] env[62109]: DEBUG nova.compute.manager [req-8e812375-b7d9-421e-95e2-40e8fa98c5e5 req-6b13d4d7-6c9a-434a-ad9f-33ab617d4f77 service nova] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Refreshing instance network info cache due to event network-changed-7acb7bd6-0ed9-4960-8302-e1956fde3555. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1137.284919] env[62109]: DEBUG oslo_concurrency.lockutils [req-8e812375-b7d9-421e-95e2-40e8fa98c5e5 req-6b13d4d7-6c9a-434a-ad9f-33ab617d4f77 service nova] Acquiring lock "refresh_cache-c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1137.285138] env[62109]: DEBUG oslo_concurrency.lockutils [req-8e812375-b7d9-421e-95e2-40e8fa98c5e5 req-6b13d4d7-6c9a-434a-ad9f-33ab617d4f77 service nova] Acquired lock "refresh_cache-c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.285314] env[62109]: DEBUG nova.network.neutron [req-8e812375-b7d9-421e-95e2-40e8fa98c5e5 req-6b13d4d7-6c9a-434a-ad9f-33ab617d4f77 service nova] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Refreshing network info cache for port 7acb7bd6-0ed9-4960-8302-e1956fde3555 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1137.576653] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117273, 'name': ReconfigVM_Task, 'duration_secs': 0.332627} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.576956] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 06af64af-5448-49f9-9d5a-12dad1bdfe29/06af64af-5448-49f9-9d5a-12dad1bdfe29.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1137.577669] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0f88ec7-e76c-49cf-aa9e-9d73ce8aa301 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.584719] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1137.584719] env[62109]: value = "task-1117276" [ 1137.584719] env[62109]: _type = "Task" [ 1137.584719] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.593444] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117276, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.610851] env[62109]: DEBUG oslo_vmware.api [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117274, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210595} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.611215] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1137.611452] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1137.611694] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1137.614424] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Acquiring lock "refresh_cache-c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1137.636427] env[62109]: INFO nova.scheduler.client.report [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleted allocations for instance 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 [ 1137.727969] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117275, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.836397] env[62109]: DEBUG nova.network.neutron [req-8e812375-b7d9-421e-95e2-40e8fa98c5e5 req-6b13d4d7-6c9a-434a-ad9f-33ab617d4f77 service nova] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1137.983883] env[62109]: DEBUG nova.network.neutron [req-8e812375-b7d9-421e-95e2-40e8fa98c5e5 req-6b13d4d7-6c9a-434a-ad9f-33ab617d4f77 service nova] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.094790] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117276, 'name': Rename_Task, 'duration_secs': 0.191206} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.095167] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1138.095272] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d31286e-42cb-4773-8536-0fec3f85d3e9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.100779] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1138.100779] env[62109]: value = "task-1117277" [ 1138.100779] env[62109]: _type = "Task" [ 1138.100779] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.108140] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117277, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.141546] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.141899] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.142765] env[62109]: DEBUG nova.objects.instance [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lazy-loading 'resources' on Instance uuid 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.228094] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117275, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766577} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.228423] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] fdc4486a-4837-4006-87c8-166cd5c41fcd/fdc4486a-4837-4006-87c8-166cd5c41fcd.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1138.228682] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1138.228967] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b21abf47-314f-4a17-bb9e-72dffc6538b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.235880] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1138.235880] env[62109]: value = "task-1117278" [ 1138.235880] env[62109]: _type = "Task" [ 1138.235880] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.244162] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117278, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.486985] env[62109]: DEBUG oslo_concurrency.lockutils [req-8e812375-b7d9-421e-95e2-40e8fa98c5e5 req-6b13d4d7-6c9a-434a-ad9f-33ab617d4f77 service nova] Releasing lock "refresh_cache-c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1138.487440] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Acquired lock "refresh_cache-c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.487605] env[62109]: DEBUG nova.network.neutron [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1138.612790] env[62109]: DEBUG oslo_vmware.api [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117277, 'name': PowerOnVM_Task, 'duration_secs': 0.498222} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.613156] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1138.613417] env[62109]: INFO nova.compute.manager [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Took 7.80 seconds to spawn the instance on the hypervisor. [ 1138.613629] env[62109]: DEBUG nova.compute.manager [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1138.614698] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e558933b-8277-42a0-a088-c77353462294 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.646376] env[62109]: DEBUG nova.objects.instance [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lazy-loading 'numa_topology' on Instance uuid 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.746634] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117278, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.193229} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.746924] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1138.747768] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268d6acb-e0b0-40eb-a82a-ef09acc980e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.771515] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] fdc4486a-4837-4006-87c8-166cd5c41fcd/fdc4486a-4837-4006-87c8-166cd5c41fcd.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1138.772986] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba0d7bdf-cdb0-41b5-a764-7ba2df3fcab8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.788077] env[62109]: DEBUG nova.compute.manager [req-067ace08-0956-4954-9b3b-31781793c872 req-75c57a97-8616-4187-872f-ad1374bf789c service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Received event network-changed-a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1138.788200] env[62109]: DEBUG nova.compute.manager [req-067ace08-0956-4954-9b3b-31781793c872 req-75c57a97-8616-4187-872f-ad1374bf789c service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Refreshing instance network info cache due to event network-changed-a04abfce-a9e7-413a-94d6-d14ed8f205cb. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1138.788785] env[62109]: DEBUG oslo_concurrency.lockutils [req-067ace08-0956-4954-9b3b-31781793c872 req-75c57a97-8616-4187-872f-ad1374bf789c service nova] Acquiring lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1138.788785] env[62109]: DEBUG oslo_concurrency.lockutils [req-067ace08-0956-4954-9b3b-31781793c872 req-75c57a97-8616-4187-872f-ad1374bf789c service nova] Acquired lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.788785] env[62109]: DEBUG nova.network.neutron [req-067ace08-0956-4954-9b3b-31781793c872 req-75c57a97-8616-4187-872f-ad1374bf789c service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Refreshing network info cache for port a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1138.796314] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1138.796314] env[62109]: value = "task-1117279" [ 1138.796314] env[62109]: _type = "Task" [ 1138.796314] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.806808] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117279, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.020895] env[62109]: DEBUG nova.network.neutron [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1139.135028] env[62109]: INFO nova.compute.manager [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Took 12.56 seconds to build instance. 
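[editor's note] The entries above trace the standard VMware spawn path recorded in this log: the cached image vmdk is copied to the instance directory (CopyVirtualDisk_Task), the root disk is extended to the flavor size (ExtendVirtualDisk_Task), the disk is attached via ReconfigVM_Task, and the VM is renamed and powered on, with every step driven by starting a vCenter task and polling it until completion. The following is a minimal sketch of that invoke-then-poll pattern using oslo.vmware's session API, not the Nova source itself; the endpoint, credentials, datacenter reference and datastore paths are placeholders, and the constructor keywords are assumptions that may differ between releases.

    # Sketch only: mirrors the "start *_Task, then poll until done" pattern seen
    # in the log. invoke_api() and wait_for_task() are oslo.vmware session
    # methods; host/credentials/paths below are placeholders, not log values.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'admin', 'secret',        # hypothetical vCenter endpoint
        api_retry_count=10, task_poll_interval=0.5)  # poll cadence -> "progress is N%"

    def copy_and_extend(dc_ref, cached_vmdk, instance_vmdk, size_kb):
        disk_mgr = session.vim.service_content.virtualDiskManager
        # CopyVirtualDisk_Task: copy the cached image to the instance datastore path.
        copy_task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=cached_vmdk, sourceDatacenter=dc_ref,
            destName=instance_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(copy_task)   # blocks, logging progress while polling
        # ExtendVirtualDisk_Task: grow the copied root disk to the requested size.
        extend_task = session.invoke_api(
            session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
            name=instance_vmdk, datacenter=dc_ref,
            newCapacityKb=size_kb, eagerZero=False)
        session.wait_for_task(extend_task)

The "Waiting for the task: (returnval){ value = ... }" and "progress is N%" lines in the log correspond to the polling that wait_for_task() performs at each task_poll_interval.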
[ 1139.149066] env[62109]: DEBUG nova.objects.base [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Object Instance<1b3d7fa7-5428-460e-ab47-49c6d38f24a5> lazy-loaded attributes: resources,numa_topology {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1139.191542] env[62109]: DEBUG nova.network.neutron [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Updating instance_info_cache with network_info: [{"id": "7acb7bd6-0ed9-4960-8302-e1956fde3555", "address": "fa:16:3e:2d:cb:61", "network": {"id": "00580bad-c56d-4154-b2da-81971ad2ab80", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1658101028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f840dc383ca549d1940f745e267702cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7acb7bd6-0e", "ovs_interfaceid": "7acb7bd6-0ed9-4960-8302-e1956fde3555", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.240101] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546586ad-a5b8-4469-b365-5415b67502c4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.247849] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e7ae90-35d7-495f-baf7-dad7ae38dad4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.277772] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ede1e2-4b18-4f1e-ae33-513956c9eb8e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.285376] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5340b3eb-34f4-4ad0-800f-0e28b39afc88 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.300448] env[62109]: DEBUG nova.compute.provider_tree [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1139.309570] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 
tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117279, 'name': ReconfigVM_Task, 'duration_secs': 0.26845} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.310328] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Reconfigured VM instance instance-0000006c to attach disk [datastore1] fdc4486a-4837-4006-87c8-166cd5c41fcd/fdc4486a-4837-4006-87c8-166cd5c41fcd.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1139.310937] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b99c352b-fc09-4439-8c3b-bccb44c0fe35 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.317195] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1139.317195] env[62109]: value = "task-1117280" [ 1139.317195] env[62109]: _type = "Task" [ 1139.317195] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.329954] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117280, 'name': Rename_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.636689] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba2112cd-f75f-4bed-b135-21e9389ae88f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "06af64af-5448-49f9-9d5a-12dad1bdfe29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.073s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.694596] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Releasing lock "refresh_cache-c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.694923] env[62109]: DEBUG nova.compute.manager [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Instance network_info: |[{"id": "7acb7bd6-0ed9-4960-8302-e1956fde3555", "address": "fa:16:3e:2d:cb:61", "network": {"id": "00580bad-c56d-4154-b2da-81971ad2ab80", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1658101028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "f840dc383ca549d1940f745e267702cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7acb7bd6-0e", "ovs_interfaceid": "7acb7bd6-0ed9-4960-8302-e1956fde3555", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1139.695400] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:cb:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf1b231-3660-4453-b4f3-44d825b9a5dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7acb7bd6-0ed9-4960-8302-e1956fde3555', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1139.702766] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Creating folder: Project (f840dc383ca549d1940f745e267702cc). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1139.705753] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3718021f-9a42-467d-95d9-f9af43fefb2f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.777462] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Created folder: Project (f840dc383ca549d1940f745e267702cc) in parent group-v244329. [ 1139.777680] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Creating folder: Instances. Parent ref: group-v244560. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1139.777934] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1934619a-26c7-4ec2-9650-c5ef8685dc25 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.780241] env[62109]: DEBUG nova.network.neutron [req-067ace08-0956-4954-9b3b-31781793c872 req-75c57a97-8616-4187-872f-ad1374bf789c service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updated VIF entry in instance network info cache for port a04abfce-a9e7-413a-94d6-d14ed8f205cb. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1139.780682] env[62109]: DEBUG nova.network.neutron [req-067ace08-0956-4954-9b3b-31781793c872 req-75c57a97-8616-4187-872f-ad1374bf789c service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updating instance_info_cache with network_info: [{"id": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "address": "fa:16:3e:53:c9:6c", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapa04abfce-a9", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.789781] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Created folder: Instances in parent group-v244560. [ 1139.790044] env[62109]: DEBUG oslo.service.loopingcall [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1139.790282] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1139.790568] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d30f73e4-907c-4198-93db-4341c7dfdfd4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.808869] env[62109]: DEBUG nova.scheduler.client.report [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1139.823296] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1139.823296] env[62109]: value = "task-1117283" [ 1139.823296] env[62109]: _type = "Task" [ 1139.823296] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.829829] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117280, 'name': Rename_Task, 'duration_secs': 0.145278} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.830422] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1139.830666] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88aa4bec-503c-4f0f-99e3-d09a1ca1ab1b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.835544] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117283, 'name': CreateVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.839869] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1139.839869] env[62109]: value = "task-1117284" [ 1139.839869] env[62109]: _type = "Task" [ 1139.839869] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.848228] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117284, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.920921] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.283491] env[62109]: DEBUG oslo_concurrency.lockutils [req-067ace08-0956-4954-9b3b-31781793c872 req-75c57a97-8616-4187-872f-ad1374bf789c service nova] Releasing lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1140.315669] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.174s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.318239] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "06af64af-5448-49f9-9d5a-12dad1bdfe29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.318438] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "06af64af-5448-49f9-9d5a-12dad1bdfe29" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.318711] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "06af64af-5448-49f9-9d5a-12dad1bdfe29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.318910] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "06af64af-5448-49f9-9d5a-12dad1bdfe29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.319096] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "06af64af-5448-49f9-9d5a-12dad1bdfe29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} 
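The lock acquire/release pairs above (and throughout this trace) are emitted by oslo_concurrency.lockutils, which wraps each named lock and logs how long the caller waited for it and how long it was held. Below is a minimal, illustrative sketch of that idiom using only the public lockutils API; the lock name and functions are placeholders for this annotation, not code from this deployment.

    import logging

    from oslo_concurrency import lockutils

    # DEBUG level is what makes the "Acquiring lock ...", "... acquired ::
    # waited Ns" and "... released :: held Ns" messages visible, as in the
    # trace above.
    logging.basicConfig(level=logging.DEBUG)

    # Example lock name; Nova uses per-instance UUIDs (and "<uuid>-events"
    # variants) as lock names in the same way.
    LOCK_NAME = "06af64af-5448-49f9-9d5a-12dad1bdfe29"


    @lockutils.synchronized(LOCK_NAME)
    def do_terminate_instance():
        """Runs only while the named in-process lock is held."""
        print("lock held: terminating instance")


    def main():
        # Decorator form: callers serialize on LOCK_NAME.
        do_terminate_instance()

        # Equivalent context-manager form on the same lock name; lockutils
        # again logs the waited/held durations for this holder.
        with lockutils.lock(LOCK_NAME):
            print("lock held: clearing events")


    if __name__ == "__main__":
        main()

The "-events" locks in the trace are simply separate lock names created the same way, which is why each acquire/release pair reports its own waited and held timings.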
[ 1140.322527] env[62109]: INFO nova.compute.manager [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Terminating instance [ 1140.325173] env[62109]: DEBUG nova.compute.manager [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1140.325173] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1140.330830] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231d8bf9-169f-4d8e-8836-71e9a888ae6d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.340454] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1140.348025] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7985d0e1-1a74-47ae-8390-550bb24b2308 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.349731] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117283, 'name': CreateVM_Task, 'duration_secs': 0.41818} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.351117] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1140.352191] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.352441] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.352879] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1140.357745] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f9553b5-b9e8-4a1b-905c-51cca0c78b61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.359812] env[62109]: DEBUG oslo_vmware.api [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117284, 'name': PowerOnVM_Task, 'duration_secs': 0.434081} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.361641] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1140.361938] env[62109]: INFO nova.compute.manager [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Took 7.34 seconds to spawn the instance on the hypervisor. [ 1140.362232] env[62109]: DEBUG nova.compute.manager [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1140.362684] env[62109]: DEBUG oslo_vmware.api [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1140.362684] env[62109]: value = "task-1117285" [ 1140.362684] env[62109]: _type = "Task" [ 1140.362684] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.364079] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7e381c-395b-4718-bcee-6493b9f373ab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.371713] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Waiting for the task: (returnval){ [ 1140.371713] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a773b4-e664-ba9b-53bf-fe7b8ed5e6f8" [ 1140.371713] env[62109]: _type = "Task" [ 1140.371713] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.381808] env[62109]: DEBUG oslo_vmware.api [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117285, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.387433] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a773b4-e664-ba9b-53bf-fe7b8ed5e6f8, 'name': SearchDatastore_Task, 'duration_secs': 0.012783} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.387711] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1140.387945] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1140.388199] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.388350] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.388548] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 
tempest-ServerGroupTestJSON-1496661847-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1140.388784] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6078b278-2871-4057-96c7-1f365efc4808 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.396262] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1140.396439] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1140.397152] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7043e9a1-11d2-4695-84d3-5753c8ae1af7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.402375] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Waiting for the task: (returnval){ [ 1140.402375] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f820eb-ffcb-5c5d-ab2e-08c5c17e9b80" [ 1140.402375] env[62109]: _type = "Task" [ 1140.402375] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.411378] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f820eb-ffcb-5c5d-ab2e-08c5c17e9b80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.826848] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f02d4f31-9309-4d22-8247-2553924cce46 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.912s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.827764] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.907s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.827952] env[62109]: INFO nova.compute.manager [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Unshelving [ 1140.877367] env[62109]: DEBUG oslo_vmware.api [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117285, 'name': PowerOffVM_Task, 'duration_secs': 0.207662} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.877680] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1140.877859] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1140.878161] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78372611-d7c2-4241-8b6c-ff2f18d78c82 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.889152] env[62109]: INFO nova.compute.manager [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Took 13.64 seconds to build instance. [ 1140.913598] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f820eb-ffcb-5c5d-ab2e-08c5c17e9b80, 'name': SearchDatastore_Task, 'duration_secs': 0.010702} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.914468] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42831029-9f40-40f7-b75c-1b0a99d16f63 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.920535] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Waiting for the task: (returnval){ [ 1140.920535] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52dc4b77-fd52-1d22-2bfa-b78138fbbdab" [ 1140.920535] env[62109]: _type = "Task" [ 1140.920535] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.931308] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52dc4b77-fd52-1d22-2bfa-b78138fbbdab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.964943] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1140.965125] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1140.965294] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleting the datastore file [datastore1] 06af64af-5448-49f9-9d5a-12dad1bdfe29 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1140.965571] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a6c0a380-0499-4edf-8d9a-9addc36cefed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.972611] env[62109]: DEBUG oslo_vmware.api [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1140.972611] env[62109]: value = "task-1117287" [ 1140.972611] env[62109]: _type = "Task" [ 1140.972611] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.981190] env[62109]: DEBUG oslo_vmware.api [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117287, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.391421] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c804ba74-7691-4d63-afc0-6f20ffbc44db tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "fdc4486a-4837-4006-87c8-166cd5c41fcd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.148s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.431743] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52dc4b77-fd52-1d22-2bfa-b78138fbbdab, 'name': SearchDatastore_Task, 'duration_secs': 0.019932} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.432037] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1141.432298] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17/c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1141.432546] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ded006e1-ae32-4693-bf9e-9ff232e1d0f0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.439159] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Waiting for the task: (returnval){ [ 1141.439159] env[62109]: value = "task-1117288" [ 1141.439159] env[62109]: _type = "Task" [ 1141.439159] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.446562] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117288, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.481755] env[62109]: DEBUG oslo_vmware.api [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117287, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155685} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.482015] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1141.482260] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1141.482437] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1141.482613] env[62109]: INFO nova.compute.manager [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1141.482854] env[62109]: DEBUG oslo.service.loopingcall [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1141.483077] env[62109]: DEBUG nova.compute.manager [-] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1141.483368] env[62109]: DEBUG nova.network.neutron [-] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1141.837775] env[62109]: DEBUG nova.compute.utils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1141.884928] env[62109]: DEBUG nova.compute.manager [req-dac8fe6a-d34d-445c-a6a7-342caf434beb req-e3c1e90d-1f99-4f7a-818f-6ba53a9f1ff8 service nova] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Received event network-vif-deleted-17f64e3a-f17f-4264-a8a8-8fc27a55283b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1141.885465] env[62109]: INFO nova.compute.manager [req-dac8fe6a-d34d-445c-a6a7-342caf434beb req-e3c1e90d-1f99-4f7a-818f-6ba53a9f1ff8 service nova] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Neutron deleted interface 17f64e3a-f17f-4264-a8a8-8fc27a55283b; detaching it from the instance and deleting it from the info cache [ 1141.885798] env[62109]: DEBUG nova.network.neutron [req-dac8fe6a-d34d-445c-a6a7-342caf434beb req-e3c1e90d-1f99-4f7a-818f-6ba53a9f1ff8 service nova] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Updating instance_info_cache with network_info: [] {{(pid=62109) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.949294] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117288, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476809} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.952047] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17/c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1141.952047] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1141.952047] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bfefea90-ab91-4f66-a1c8-3dfcdd616631 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.957392] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Waiting for the task: (returnval){ [ 1141.957392] env[62109]: value = "task-1117289" [ 1141.957392] env[62109]: _type = "Task" [ 1141.957392] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.967927] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117289, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.343361] env[62109]: INFO nova.virt.block_device [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Booting with volume 3d953331-6f18-4580-a50a-a728e86a4128 at /dev/sdb [ 1142.358903] env[62109]: DEBUG nova.network.neutron [-] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.377171] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e988fd05-6b2b-4595-8838-a54c615e999d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.387264] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee1eeab-51f8-473b-90c7-bd5a021d717c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.397824] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80548854-4f3a-4ffc-85b0-4f96b2a5856c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.405965] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919527d2-af28-43a5-b5ad-e9ce14b59adb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.422568] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b5459a23-1d05-4a0b-a651-3b7bb8b41fab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.429990] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5224eb98-61ad-42dc-8781-2b59809f4a04 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.445846] env[62109]: DEBUG nova.compute.manager [req-dac8fe6a-d34d-445c-a6a7-342caf434beb req-e3c1e90d-1f99-4f7a-818f-6ba53a9f1ff8 service nova] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Detach interface failed, port_id=17f64e3a-f17f-4264-a8a8-8fc27a55283b, reason: Instance 06af64af-5448-49f9-9d5a-12dad1bdfe29 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1142.463791] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265fbf67-cf44-4926-b91b-6c854bc720d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.470996] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117289, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066381} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.473035] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1142.473677] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdb8f63-e61c-45e2-b007-e14adffff88d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.476344] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43d2b79-c341-4c77-9966-39ac960d43ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.499990] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17/c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1142.503605] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67e34a6f-8605-4bf4-ac3c-bb3f3da7b94d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.517741] env[62109]: DEBUG nova.virt.block_device [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updating existing volume attachment record: 98500752-f369-4888-aebb-aea12d6ebd61 {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1142.526257] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Waiting for the task: (returnval){ [ 1142.526257] env[62109]: value = "task-1117290" [ 1142.526257] env[62109]: _type = "Task" [ 1142.526257] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.537576] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117290, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.861236] env[62109]: INFO nova.compute.manager [-] [instance: 06af64af-5448-49f9-9d5a-12dad1bdfe29] Took 1.38 seconds to deallocate network for instance. [ 1143.036739] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117290, 'name': ReconfigVM_Task, 'duration_secs': 0.289737} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.037059] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Reconfigured VM instance instance-0000006d to attach disk [datastore1] c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17/c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1143.037644] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a81bc49a-4c46-40ac-a370-afd2abb9d243 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.044144] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Waiting for the task: (returnval){ [ 1143.044144] env[62109]: value = "task-1117294" [ 1143.044144] env[62109]: _type = "Task" [ 1143.044144] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.052202] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117294, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.368296] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.368636] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.368876] env[62109]: DEBUG nova.objects.instance [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lazy-loading 'resources' on Instance uuid 06af64af-5448-49f9-9d5a-12dad1bdfe29 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.554449] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117294, 'name': Rename_Task, 'duration_secs': 0.136253} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.554717] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1143.556043] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a7b32e9-f383-496f-8f7e-ae22e7928e78 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.561479] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Waiting for the task: (returnval){ [ 1143.561479] env[62109]: value = "task-1117295" [ 1143.561479] env[62109]: _type = "Task" [ 1143.561479] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.568704] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117295, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.912255] env[62109]: DEBUG nova.compute.manager [req-da96e755-836a-4ad3-908e-1d2adef7b422 req-fc869a92-18f9-4fa9-86d5-e39e1652e136 service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Received event network-changed-8a4912e9-48e5-4762-aad9-050359873623 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1143.912447] env[62109]: DEBUG nova.compute.manager [req-da96e755-836a-4ad3-908e-1d2adef7b422 req-fc869a92-18f9-4fa9-86d5-e39e1652e136 service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Refreshing instance network info cache due to event network-changed-8a4912e9-48e5-4762-aad9-050359873623. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1143.912742] env[62109]: DEBUG oslo_concurrency.lockutils [req-da96e755-836a-4ad3-908e-1d2adef7b422 req-fc869a92-18f9-4fa9-86d5-e39e1652e136 service nova] Acquiring lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.912832] env[62109]: DEBUG oslo_concurrency.lockutils [req-da96e755-836a-4ad3-908e-1d2adef7b422 req-fc869a92-18f9-4fa9-86d5-e39e1652e136 service nova] Acquired lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.912974] env[62109]: DEBUG nova.network.neutron [req-da96e755-836a-4ad3-908e-1d2adef7b422 req-fc869a92-18f9-4fa9-86d5-e39e1652e136 service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Refreshing network info cache for port 8a4912e9-48e5-4762-aad9-050359873623 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1143.976016] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb7cd09-cc86-4e93-9657-cfe478123609 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.983658] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac41875-a65d-4381-8b82-b12155a103e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.017072] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea94d9a-1d1b-4988-b126-d04b49c43fc6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.024970] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d179ae80-312c-42e3-9708-be9289e64aa2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.038736] env[62109]: DEBUG nova.compute.provider_tree [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.070857] env[62109]: DEBUG oslo_vmware.api [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117295, 'name': PowerOnVM_Task, 'duration_secs': 0.477584} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.071093] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1144.071305] env[62109]: INFO nova.compute.manager [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Took 7.75 seconds to spawn the instance on the hypervisor. [ 1144.071491] env[62109]: DEBUG nova.compute.manager [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1144.072231] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804d661b-722b-44c9-8455-91fb7f48d894 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.543598] env[62109]: DEBUG nova.scheduler.client.report [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1144.588047] env[62109]: INFO nova.compute.manager [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Took 15.17 seconds to build instance. [ 1144.778368] env[62109]: DEBUG nova.network.neutron [req-da96e755-836a-4ad3-908e-1d2adef7b422 req-fc869a92-18f9-4fa9-86d5-e39e1652e136 service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updated VIF entry in instance network info cache for port 8a4912e9-48e5-4762-aad9-050359873623. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1144.778850] env[62109]: DEBUG nova.network.neutron [req-da96e755-836a-4ad3-908e-1d2adef7b422 req-fc869a92-18f9-4fa9-86d5-e39e1652e136 service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance_info_cache with network_info: [{"id": "8a4912e9-48e5-4762-aad9-050359873623", "address": "fa:16:3e:1b:58:93", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a4912e9-48", "ovs_interfaceid": "8a4912e9-48e5-4762-aad9-050359873623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.053240] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.684s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.074059] env[62109]: INFO nova.scheduler.client.report [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted allocations for instance 06af64af-5448-49f9-9d5a-12dad1bdfe29 [ 1145.089142] env[62109]: DEBUG oslo_concurrency.lockutils [None req-181f173e-ac8d-455d-abcd-c9d7b197ec47 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.677s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.282052] env[62109]: DEBUG oslo_concurrency.lockutils [req-da96e755-836a-4ad3-908e-1d2adef7b422 req-fc869a92-18f9-4fa9-86d5-e39e1652e136 service nova] Releasing lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.319908] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Acquiring lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.319908] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.320237] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Acquiring lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.320444] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.320645] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.322776] env[62109]: INFO nova.compute.manager [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Terminating instance [ 1145.324741] env[62109]: DEBUG nova.compute.manager [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1145.324949] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1145.325790] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589599d2-a1ee-487a-aaa6-5bb30b0b614c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.333332] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1145.333619] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a42add24-9810-408a-b891-5435efdabf29 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.339800] env[62109]: DEBUG oslo_vmware.api [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Waiting for the task: (returnval){ [ 1145.339800] env[62109]: value = "task-1117297" [ 1145.339800] env[62109]: _type = "Task" [ 1145.339800] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.349393] env[62109]: DEBUG oslo_vmware.api [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117297, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.581557] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5a11914f-cf4a-47c3-a9e0-d3c69623964c tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "06af64af-5448-49f9-9d5a-12dad1bdfe29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.263s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.850657] env[62109]: DEBUG oslo_vmware.api [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117297, 'name': PowerOffVM_Task, 'duration_secs': 0.165239} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.851297] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1145.851622] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1145.851776] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2eeb845e-113b-4136-b177-065dd6505116 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.918807] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1145.919072] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1145.919241] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Deleting the datastore file [datastore1] c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1145.919496] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe488a25-03cd-4f8a-89df-2c860f5edd1b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.926142] env[62109]: DEBUG oslo_vmware.api [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Waiting for the task: (returnval){ [ 1145.926142] env[62109]: value = "task-1117299" [ 1145.926142] env[62109]: _type = "Task" [ 1145.926142] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.934414] env[62109]: DEBUG oslo_vmware.api [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117299, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.226080] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.226326] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.435758] env[62109]: DEBUG oslo_vmware.api [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Task: {'id': task-1117299, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144111} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.437063] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1146.437063] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1146.437063] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1146.437063] env[62109]: INFO nova.compute.manager [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1146.437292] env[62109]: DEBUG oslo.service.loopingcall [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1146.437330] env[62109]: DEBUG nova.compute.manager [-] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1146.437409] env[62109]: DEBUG nova.network.neutron [-] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1146.719930] env[62109]: DEBUG nova.compute.manager [req-caf0c308-3c88-4ade-a190-3b84ad160d54 req-a92cc1c5-b6d7-49eb-b683-4d2b7502d528 service nova] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Received event network-vif-deleted-7acb7bd6-0ed9-4960-8302-e1956fde3555 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1146.720162] env[62109]: INFO nova.compute.manager [req-caf0c308-3c88-4ade-a190-3b84ad160d54 req-a92cc1c5-b6d7-49eb-b683-4d2b7502d528 service nova] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Neutron deleted interface 7acb7bd6-0ed9-4960-8302-e1956fde3555; detaching it from the instance and deleting it from the info cache [ 1146.720342] env[62109]: DEBUG nova.network.neutron [req-caf0c308-3c88-4ade-a190-3b84ad160d54 req-a92cc1c5-b6d7-49eb-b683-4d2b7502d528 service nova] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.728934] env[62109]: DEBUG nova.compute.manager [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1147.197529] env[62109]: DEBUG nova.network.neutron [-] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.223458] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0271c599-a008-4eab-b6ac-46f1e2acdfd5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.233232] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba499093-b8fc-4d7f-b1d7-0bf1fcff70bf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.264033] env[62109]: DEBUG nova.compute.manager [req-caf0c308-3c88-4ade-a190-3b84ad160d54 req-a92cc1c5-b6d7-49eb-b683-4d2b7502d528 service nova] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Detach interface failed, port_id=7acb7bd6-0ed9-4960-8302-e1956fde3555, reason: Instance c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17 could not be found. 
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1147.265215] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.265447] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.266981] env[62109]: INFO nova.compute.claims [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1147.700091] env[62109]: INFO nova.compute.manager [-] [instance: c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17] Took 1.26 seconds to deallocate network for instance. [ 1148.113762] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.206131] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.355175] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf7f138-a372-47fa-8f1c-8ca779856a58 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.362311] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9850534-07dd-4ea4-89ea-dbf6f5528983 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.391065] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd17c41-335f-4a7e-ae1a-547279121b7b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.398994] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-346c998e-9602-452f-bbde-3d13510899cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.412337] env[62109]: DEBUG nova.compute.provider_tree [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 
574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1148.915873] env[62109]: DEBUG nova.scheduler.client.report [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1149.420944] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.155s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.421587] env[62109]: DEBUG nova.compute.manager [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1149.426118] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.313s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.426339] env[62109]: DEBUG nova.objects.instance [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lazy-loading 'pci_requests' on Instance uuid 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1149.931041] env[62109]: DEBUG nova.compute.utils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1149.933284] env[62109]: DEBUG nova.objects.instance [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lazy-loading 'numa_topology' on Instance uuid 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1149.934535] env[62109]: DEBUG nova.compute.manager [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1149.934734] env[62109]: DEBUG nova.network.neutron [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1149.978541] env[62109]: DEBUG nova.policy [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73cb2c252e3f4e68a2767b349e0917e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df642ff4b7f247d09f80b260ed9ef53f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1150.213215] env[62109]: DEBUG nova.network.neutron [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Successfully created port: 726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1150.435216] env[62109]: DEBUG nova.compute.manager [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1150.438077] env[62109]: INFO nova.compute.claims [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1151.078143] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1151.449870] env[62109]: DEBUG nova.compute.manager [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1151.481479] env[62109]: DEBUG nova.virt.hardware [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1151.481902] env[62109]: DEBUG nova.virt.hardware [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1151.482091] env[62109]: DEBUG nova.virt.hardware [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1151.482334] env[62109]: DEBUG nova.virt.hardware [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1151.482499] env[62109]: DEBUG nova.virt.hardware [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1151.482653] env[62109]: DEBUG nova.virt.hardware [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1151.482898] env[62109]: DEBUG nova.virt.hardware [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1151.483034] env[62109]: DEBUG nova.virt.hardware [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1151.483213] env[62109]: DEBUG nova.virt.hardware [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 
tempest-ServersTestJSON-632393265-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1151.483381] env[62109]: DEBUG nova.virt.hardware [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1151.483559] env[62109]: DEBUG nova.virt.hardware [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1151.484440] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02ea3ab-2c95-49b4-bbec-0356ce24a331 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.495581] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76d2ecd-3481-4b10-b3dd-86a02603ac37 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.554786] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77e2226-9590-4e0e-87fd-a072b93690dd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.562411] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0de8d5-9e82-4d8b-ae63-d703612b3cde {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.595757] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Getting list of instances from cluster (obj){ [ 1151.595757] env[62109]: value = "domain-c8" [ 1151.595757] env[62109]: _type = "ClusterComputeResource" [ 1151.595757] env[62109]: } {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1151.596852] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939852d0-ced2-4ba8-ad38-e98bc3b5367b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.600208] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9793030c-53c1-4469-9d87-0cc8f81d3f13 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.611644] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313a5875-4805-422b-a95f-69b53aef6ef4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.615805] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Got total of 3 instances {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1151.615964] env[62109]: WARNING nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] While synchronizing instance power states, found 7 instances in the database and 
3 instances on the hypervisor. [ 1151.616125] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Triggering sync for uuid dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 {{(pid=62109) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1151.616318] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Triggering sync for uuid 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 {{(pid=62109) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1151.616481] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Triggering sync for uuid 8a64a700-e381-49a0-89ae-8a678ed7a4fb {{(pid=62109) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1151.616640] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Triggering sync for uuid c7a95d76-b143-45ce-87b3-de0b63e53169 {{(pid=62109) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1151.616794] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Triggering sync for uuid fdc4486a-4837-4006-87c8-166cd5c41fcd {{(pid=62109) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1151.616952] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Triggering sync for uuid c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17 {{(pid=62109) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1151.617129] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Triggering sync for uuid 05158016-bd14-4a6b-b9d5-b8ebfb8063c1 {{(pid=62109) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1151.617427] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.617655] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.617926] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.618803] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.618803] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.618803] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "c7a95d76-b143-45ce-87b3-de0b63e53169" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.618803] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.619117] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "fdc4486a-4837-4006-87c8-166cd5c41fcd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.619202] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "fdc4486a-4837-4006-87c8-166cd5c41fcd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.619433] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.619644] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.620254] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d71fea6e-20d9-46f0-a021-912ae0f5fb46 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.622477] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe5566d-4dee-45f2-91a5-4b20b669e8c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.625032] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4230fd-9592-4104-97ed-d2f6c30ce8a4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.627654] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea77833-5478-4916-86d9-b3fca30ecf83 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.637201] env[62109]: DEBUG nova.compute.provider_tree [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1151.648360] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b970f1c-9866-4e2f-9d6d-58c93812c346 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.697615] env[62109]: DEBUG nova.compute.manager [req-7a501402-9dd9-406c-8602-dc05e266204e req-ad06403e-5416-4e54-b7f1-58fcaab9ce14 service nova] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Received event network-vif-plugged-726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1151.698107] env[62109]: DEBUG oslo_concurrency.lockutils [req-7a501402-9dd9-406c-8602-dc05e266204e req-ad06403e-5416-4e54-b7f1-58fcaab9ce14 service nova] Acquiring lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.698350] env[62109]: DEBUG oslo_concurrency.lockutils [req-7a501402-9dd9-406c-8602-dc05e266204e req-ad06403e-5416-4e54-b7f1-58fcaab9ce14 service nova] Lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.698542] env[62109]: DEBUG oslo_concurrency.lockutils [req-7a501402-9dd9-406c-8602-dc05e266204e req-ad06403e-5416-4e54-b7f1-58fcaab9ce14 service nova] Lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.698795] env[62109]: DEBUG nova.compute.manager [req-7a501402-9dd9-406c-8602-dc05e266204e req-ad06403e-5416-4e54-b7f1-58fcaab9ce14 service nova] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] No waiting events found dispatching network-vif-plugged-726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1151.698979] env[62109]: WARNING nova.compute.manager [req-7a501402-9dd9-406c-8602-dc05e266204e req-ad06403e-5416-4e54-b7f1-58fcaab9ce14 service nova] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Received unexpected event network-vif-plugged-726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0 for instance with vm_state building and task_state spawning. 
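[editor's note] The repeated lock triplets in the entries above ("Acquiring lock ... by ...", "Lock ... acquired ... :: waited", "Lock ... released ... :: held", logged from lockutils.py:402/407/421) and the "Acquiring/Acquired/Releasing lock refresh_cache-..." pairs (lockutils.py:310/313/331) are the standard output of oslo.concurrency's two locking forms. The following minimal Python sketch is illustrative only, not code from the Nova tree that produced this log; the lock names and function names are hypothetical examples chosen to mirror the identifiers seen above.

    # Illustrative sketch of the oslo.concurrency patterns behind the log lines above.
    from oslo_concurrency import lockutils

    # Decorator form: its "inner" wrapper emits the
    # "Acquiring lock ... / acquired ... waited / released ... held" DEBUG triplets.
    @lockutils.synchronized("c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17")  # hypothetical: instance UUID reused as lock name
    def do_terminate_instance():
        # Runs only while the named in-process lock is held.
        pass

    # Context-manager form: lock() logs the "Acquiring/Acquired/Releasing lock" pairs
    # seen around the refresh_cache-<uuid> sections of the log.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            pass  # placeholder: rebuild the instance's network info cache here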
[ 1151.777160] env[62109]: DEBUG nova.network.neutron [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Successfully updated port: 726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1152.142396] env[62109]: DEBUG nova.scheduler.client.report [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1152.164562] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "fdc4486a-4837-4006-87c8-166cd5c41fcd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.545s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.164938] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.546s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.165518] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.547s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.182509] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.565s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.279518] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "refresh_cache-05158016-bd14-4a6b-b9d5-b8ebfb8063c1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.279795] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "refresh_cache-05158016-bd14-4a6b-b9d5-b8ebfb8063c1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.279795] env[62109]: DEBUG nova.network.neutron [None req-73ec0402-e8c0-4a03-b352-8799526d4537 
tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1152.647415] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.221s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.649827] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.444s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.650252] env[62109]: DEBUG nova.objects.instance [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Lazy-loading 'resources' on Instance uuid c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1152.677829] env[62109]: INFO nova.network.neutron [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updating port a04abfce-a9e7-413a-94d6-d14ed8f205cb with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1152.812626] env[62109]: DEBUG nova.network.neutron [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1152.946194] env[62109]: DEBUG nova.network.neutron [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Updating instance_info_cache with network_info: [{"id": "726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0", "address": "fa:16:3e:e9:38:31", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap726fd50f-5e", "ovs_interfaceid": "726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.252136] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47370ce2-13fb-452b-a66d-2473b5198318 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.260048] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df7b78c-0ec1-4af1-b8ac-d1acf29c0eeb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.289243] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f284e21-abf6-4cd5-ab2e-5dd2f943b882 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.296301] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91863b9f-45f3-4b9e-b153-85f85e8cd94b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.310583] env[62109]: DEBUG nova.compute.provider_tree [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.449034] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "refresh_cache-05158016-bd14-4a6b-b9d5-b8ebfb8063c1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.449034] env[62109]: DEBUG nova.compute.manager [None 
req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Instance network_info: |[{"id": "726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0", "address": "fa:16:3e:e9:38:31", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap726fd50f-5e", "ovs_interfaceid": "726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1153.449379] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:38:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbe1725d-6711-4e92-9a4e-d4802651e7d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1153.456896] env[62109]: DEBUG oslo.service.loopingcall [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1153.457144] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1153.457370] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b0f6758-cfd9-4923-bfb6-cd2c1f7d0672 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.479369] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1153.479369] env[62109]: value = "task-1117300" [ 1153.479369] env[62109]: _type = "Task" [ 1153.479369] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.487041] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117300, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.755019] env[62109]: DEBUG nova.compute.manager [req-d633a93d-552c-4d22-9b5f-73c43d330bc7 req-df126c3f-33ac-486b-9b39-aa1b6928436a service nova] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Received event network-changed-726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1153.755270] env[62109]: DEBUG nova.compute.manager [req-d633a93d-552c-4d22-9b5f-73c43d330bc7 req-df126c3f-33ac-486b-9b39-aa1b6928436a service nova] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Refreshing instance network info cache due to event network-changed-726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1153.755457] env[62109]: DEBUG oslo_concurrency.lockutils [req-d633a93d-552c-4d22-9b5f-73c43d330bc7 req-df126c3f-33ac-486b-9b39-aa1b6928436a service nova] Acquiring lock "refresh_cache-05158016-bd14-4a6b-b9d5-b8ebfb8063c1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.755606] env[62109]: DEBUG oslo_concurrency.lockutils [req-d633a93d-552c-4d22-9b5f-73c43d330bc7 req-df126c3f-33ac-486b-9b39-aa1b6928436a service nova] Acquired lock "refresh_cache-05158016-bd14-4a6b-b9d5-b8ebfb8063c1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.755773] env[62109]: DEBUG nova.network.neutron [req-d633a93d-552c-4d22-9b5f-73c43d330bc7 req-df126c3f-33ac-486b-9b39-aa1b6928436a service nova] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Refreshing network info cache for port 726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1153.814216] env[62109]: DEBUG nova.scheduler.client.report [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1153.989608] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117300, 'name': CreateVM_Task, 'duration_secs': 0.311201} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.989831] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1153.990551] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.990789] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.991134] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1153.991380] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef863c80-6298-4329-9fdf-e4c864f90c80 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.996057] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1153.996057] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5277ca07-fb75-71ae-8862-061a5820bcc9" [ 1153.996057] env[62109]: _type = "Task" [ 1153.996057] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.004383] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5277ca07-fb75-71ae-8862-061a5820bcc9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.142184] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1154.142403] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.142586] env[62109]: DEBUG nova.network.neutron [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1154.320047] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.669s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.339790] env[62109]: INFO nova.scheduler.client.report [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Deleted allocations for instance c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17 [ 1154.507015] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5277ca07-fb75-71ae-8862-061a5820bcc9, 'name': SearchDatastore_Task, 'duration_secs': 0.010016} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.507868] env[62109]: DEBUG nova.network.neutron [req-d633a93d-552c-4d22-9b5f-73c43d330bc7 req-df126c3f-33ac-486b-9b39-aa1b6928436a service nova] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Updated VIF entry in instance network info cache for port 726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1154.508217] env[62109]: DEBUG nova.network.neutron [req-d633a93d-552c-4d22-9b5f-73c43d330bc7 req-df126c3f-33ac-486b-9b39-aa1b6928436a service nova] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Updating instance_info_cache with network_info: [{"id": "726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0", "address": "fa:16:3e:e9:38:31", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap726fd50f-5e", "ovs_interfaceid": "726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.509563] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1154.509709] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1154.509948] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1154.510114] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.510299] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1154.510780] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5524095-52c7-4dc6-a5aa-d1bbc7225914 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.520073] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1154.520270] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1154.521257] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8eecfe5a-ca15-4bc2-8932-401e5f9ce702 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.527269] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1154.527269] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5297bbc7-6838-6be3-9458-f67a5075eaec" [ 1154.527269] env[62109]: _type = "Task" [ 1154.527269] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.535434] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5297bbc7-6838-6be3-9458-f67a5075eaec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.847631] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eb1d1237-e382-49d0-8349-a50e7ea01c75 tempest-ServerGroupTestJSON-1496661847 tempest-ServerGroupTestJSON-1496661847-project-member] Lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.528s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.848746] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.229s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.849225] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f96c1062-a0fc-404b-a796-eec4d42130d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.860861] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ad23b5-0688-42a0-84d2-744394f1ab9d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.920198] env[62109]: DEBUG nova.network.neutron [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updating instance_info_cache with network_info: [{"id": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "address": "fa:16:3e:53:c9:6c", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa04abfce-a9", "ovs_interfaceid": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.011877] env[62109]: DEBUG oslo_concurrency.lockutils [req-d633a93d-552c-4d22-9b5f-73c43d330bc7 req-df126c3f-33ac-486b-9b39-aa1b6928436a service nova] Releasing lock "refresh_cache-05158016-bd14-4a6b-b9d5-b8ebfb8063c1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1155.037655] env[62109]: DEBUG oslo_vmware.api [None 
req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5297bbc7-6838-6be3-9458-f67a5075eaec, 'name': SearchDatastore_Task, 'duration_secs': 0.009578} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.038551] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1fb119e-4ab7-481f-8f82-c17cab4cb8ef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.043605] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1155.043605] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b6f04e-ddc1-f257-6c97-7bbad8249931" [ 1155.043605] env[62109]: _type = "Task" [ 1155.043605] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.052978] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b6f04e-ddc1-f257-6c97-7bbad8249931, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.391745] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "c508eff3-b0e0-4d40-8b67-5bfb7e6a8d17" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.543s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.422553] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1155.452990] env[62109]: DEBUG nova.virt.hardware [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='dc271d01340d87595137a73c9f901ad6',container_format='bare',created_at=2024-10-03T08:01:46Z,direct_url=,disk_format='vmdk',id=dcff6ca5-4589-4dff-8ead-c334a5a65018,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-2110102030-shelved',owner='430353b9a427408494b462b49f11354a',properties=ImageMetaProps,protected=,size=31664128,status='active',tags=,updated_at=2024-10-03T08:02:02Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1155.453281] env[62109]: DEBUG 
nova.virt.hardware [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1155.453445] env[62109]: DEBUG nova.virt.hardware [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1155.453633] env[62109]: DEBUG nova.virt.hardware [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1155.453786] env[62109]: DEBUG nova.virt.hardware [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1155.453985] env[62109]: DEBUG nova.virt.hardware [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1155.454819] env[62109]: DEBUG nova.virt.hardware [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1155.455090] env[62109]: DEBUG nova.virt.hardware [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1155.455332] env[62109]: DEBUG nova.virt.hardware [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1155.455685] env[62109]: DEBUG nova.virt.hardware [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1155.455969] env[62109]: DEBUG nova.virt.hardware [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1155.457077] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77cc59b5-4f5d-4408-b7a6-4983dceb1658 
{{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.467541] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb989bc-e337-492a-98a6-f478638c7a0b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.484977] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:c9:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a04abfce-a9e7-413a-94d6-d14ed8f205cb', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1155.494105] env[62109]: DEBUG oslo.service.loopingcall [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1155.494499] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1155.494782] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17162d67-bda8-45e8-8ec6-37db2e0bbf71 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.518760] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1155.518760] env[62109]: value = "task-1117301" [ 1155.518760] env[62109]: _type = "Task" [ 1155.518760] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.528764] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117301, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.554223] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b6f04e-ddc1-f257-6c97-7bbad8249931, 'name': SearchDatastore_Task, 'duration_secs': 0.009061} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.554569] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1155.554845] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 05158016-bd14-4a6b-b9d5-b8ebfb8063c1/05158016-bd14-4a6b-b9d5-b8ebfb8063c1.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1155.555507] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bade918c-8809-4afa-b896-09fc9a06bf1f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.563978] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1155.563978] env[62109]: value = "task-1117302" [ 1155.563978] env[62109]: _type = "Task" [ 1155.563978] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.575114] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117302, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.790179] env[62109]: DEBUG nova.compute.manager [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Received event network-vif-plugged-a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1155.790428] env[62109]: DEBUG oslo_concurrency.lockutils [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] Acquiring lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.790725] env[62109]: DEBUG oslo_concurrency.lockutils [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.790945] env[62109]: DEBUG oslo_concurrency.lockutils [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.791128] env[62109]: DEBUG nova.compute.manager [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] No waiting events found dispatching network-vif-plugged-a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1155.791247] env[62109]: WARNING nova.compute.manager [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Received unexpected event network-vif-plugged-a04abfce-a9e7-413a-94d6-d14ed8f205cb for instance with vm_state shelved_offloaded and task_state spawning. [ 1155.791411] env[62109]: DEBUG nova.compute.manager [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Received event network-changed-a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1155.791567] env[62109]: DEBUG nova.compute.manager [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Refreshing instance network info cache due to event network-changed-a04abfce-a9e7-413a-94d6-d14ed8f205cb. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1155.791826] env[62109]: DEBUG oslo_concurrency.lockutils [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] Acquiring lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1155.791984] env[62109]: DEBUG oslo_concurrency.lockutils [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] Acquired lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.792166] env[62109]: DEBUG nova.network.neutron [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Refreshing network info cache for port a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1156.034826] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117301, 'name': CreateVM_Task, 'duration_secs': 0.486645} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.035154] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1156.035720] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/dcff6ca5-4589-4dff-8ead-c334a5a65018" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.035921] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "[datastore1] devstack-image-cache_base/dcff6ca5-4589-4dff-8ead-c334a5a65018" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.036330] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/dcff6ca5-4589-4dff-8ead-c334a5a65018" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1156.036601] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b918a09c-a709-4018-8258-94c8e5e86dbd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.041977] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1156.041977] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524ae07a-9eee-7d2f-e8d7-ba68aa071c6a" [ 1156.041977] env[62109]: _type = "Task" [ 1156.041977] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.056991] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524ae07a-9eee-7d2f-e8d7-ba68aa071c6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.074639] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117302, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.499510] env[62109]: DEBUG nova.network.neutron [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updated VIF entry in instance network info cache for port a04abfce-a9e7-413a-94d6-d14ed8f205cb. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1156.499921] env[62109]: DEBUG nova.network.neutron [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updating instance_info_cache with network_info: [{"id": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "address": "fa:16:3e:53:c9:6c", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa04abfce-a9", "ovs_interfaceid": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.552926] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "[datastore1] devstack-image-cache_base/dcff6ca5-4589-4dff-8ead-c334a5a65018" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1156.553142] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 
1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Processing image dcff6ca5-4589-4dff-8ead-c334a5a65018 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1156.553392] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/dcff6ca5-4589-4dff-8ead-c334a5a65018/dcff6ca5-4589-4dff-8ead-c334a5a65018.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.553698] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "[datastore1] devstack-image-cache_base/dcff6ca5-4589-4dff-8ead-c334a5a65018/dcff6ca5-4589-4dff-8ead-c334a5a65018.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.553793] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1156.554040] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18d65be2-7000-45c7-b1ba-e715eeb0f368 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.563501] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1156.563701] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1156.564499] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93b2925a-376b-4fd4-9720-760851cc633c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.572812] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1156.572812] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b4721b-8d30-36a8-b81a-3468d17cbecf" [ 1156.572812] env[62109]: _type = "Task" [ 1156.572812] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.576393] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117302, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52359} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.579621] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 05158016-bd14-4a6b-b9d5-b8ebfb8063c1/05158016-bd14-4a6b-b9d5-b8ebfb8063c1.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1156.579842] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1156.580101] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e16a6267-2d6d-444a-a807-26d838e5fa81 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.587808] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Preparing fetch location {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1156.588063] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Fetch image to [datastore1] OSTACK_IMG_c6aae7c8-aabe-4691-b620-d89255c60979/OSTACK_IMG_c6aae7c8-aabe-4691-b620-d89255c60979.vmdk {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1156.588257] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Downloading stream optimized image dcff6ca5-4589-4dff-8ead-c334a5a65018 to [datastore1] OSTACK_IMG_c6aae7c8-aabe-4691-b620-d89255c60979/OSTACK_IMG_c6aae7c8-aabe-4691-b620-d89255c60979.vmdk on the data store datastore1 as vApp {{(pid=62109) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1156.588434] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Downloading image file data dcff6ca5-4589-4dff-8ead-c334a5a65018 to the ESX as VM named 'OSTACK_IMG_c6aae7c8-aabe-4691-b620-d89255c60979' {{(pid=62109) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1156.590692] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1156.590692] env[62109]: value = "task-1117303" [ 1156.590692] env[62109]: _type = "Task" [ 1156.590692] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.599312] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117303, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.673684] env[62109]: DEBUG oslo_vmware.rw_handles [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1156.673684] env[62109]: value = "resgroup-9" [ 1156.673684] env[62109]: _type = "ResourcePool" [ 1156.673684] env[62109]: }. {{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1156.674387] env[62109]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-0c531ab9-85e0-4a04-a48a-4e96cc51f896 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.694806] env[62109]: DEBUG oslo_vmware.rw_handles [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lease: (returnval){ [ 1156.694806] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5294756f-1e42-5001-8bc5-cdeeee3a6185" [ 1156.694806] env[62109]: _type = "HttpNfcLease" [ 1156.694806] env[62109]: } obtained for vApp import into resource pool (val){ [ 1156.694806] env[62109]: value = "resgroup-9" [ 1156.694806] env[62109]: _type = "ResourcePool" [ 1156.694806] env[62109]: }. {{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1156.695214] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the lease: (returnval){ [ 1156.695214] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5294756f-1e42-5001-8bc5-cdeeee3a6185" [ 1156.695214] env[62109]: _type = "HttpNfcLease" [ 1156.695214] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1156.701891] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1156.701891] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5294756f-1e42-5001-8bc5-cdeeee3a6185" [ 1156.701891] env[62109]: _type = "HttpNfcLease" [ 1156.701891] env[62109]: } is initializing. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1157.003285] env[62109]: DEBUG oslo_concurrency.lockutils [req-d28df002-1067-48ee-a8cb-4b7c024ed7a2 req-917c1951-5972-4d65-b0b9-d93bac7d2aa0 service nova] Releasing lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1157.100700] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117303, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061257} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.100700] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1157.101580] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3ef750-ff6e-4498-98d4-c36dd672e514 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.125357] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 05158016-bd14-4a6b-b9d5-b8ebfb8063c1/05158016-bd14-4a6b-b9d5-b8ebfb8063c1.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1157.125651] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d8c4cf8-f9f5-4cd2-b62f-a744deef0595 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.147590] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1157.147590] env[62109]: value = "task-1117305" [ 1157.147590] env[62109]: _type = "Task" [ 1157.147590] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.156809] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117305, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.203609] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1157.203609] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5294756f-1e42-5001-8bc5-cdeeee3a6185" [ 1157.203609] env[62109]: _type = "HttpNfcLease" [ 1157.203609] env[62109]: } is initializing. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1157.657122] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117305, 'name': ReconfigVM_Task, 'duration_secs': 0.307983} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.657419] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 05158016-bd14-4a6b-b9d5-b8ebfb8063c1/05158016-bd14-4a6b-b9d5-b8ebfb8063c1.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1157.658115] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-537bf9c2-0ff1-42ae-89cb-ad96513b7a0f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.664409] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1157.664409] env[62109]: value = "task-1117306" [ 1157.664409] env[62109]: _type = "Task" [ 1157.664409] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.674092] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117306, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.704106] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1157.704106] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5294756f-1e42-5001-8bc5-cdeeee3a6185" [ 1157.704106] env[62109]: _type = "HttpNfcLease" [ 1157.704106] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1157.704567] env[62109]: DEBUG oslo_vmware.rw_handles [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1157.704567] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5294756f-1e42-5001-8bc5-cdeeee3a6185" [ 1157.704567] env[62109]: _type = "HttpNfcLease" [ 1157.704567] env[62109]: }. {{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1157.705818] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cacc5dbd-12d2-40e2-ae2e-ed29319f7920 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.716220] env[62109]: DEBUG oslo_vmware.rw_handles [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f087c-c12e-2875-f33b-507e2e503055/disk-0.vmdk from lease info. 
{{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1157.716523] env[62109]: DEBUG oslo_vmware.rw_handles [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating HTTP connection to write to file with size = 31664128 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f087c-c12e-2875-f33b-507e2e503055/disk-0.vmdk. {{(pid=62109) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1157.802741] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b49c8931-614b-4325-aace-52ada1d6445b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.176073] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117306, 'name': Rename_Task, 'duration_secs': 0.180879} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.176073] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1158.176073] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-890eea12-7c93-424d-a800-69d7574d6c98 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.185017] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1158.185017] env[62109]: value = "task-1117307" [ 1158.185017] env[62109]: _type = "Task" [ 1158.185017] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.193186] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117307, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.695712] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117307, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.190966] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1159.196268] env[62109]: DEBUG oslo_vmware.api [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117307, 'name': PowerOnVM_Task, 'duration_secs': 0.580746} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.197027] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1159.197027] env[62109]: INFO nova.compute.manager [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Took 7.75 seconds to spawn the instance on the hypervisor. [ 1159.197156] env[62109]: DEBUG nova.compute.manager [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1159.197778] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036c6637-6b69-4027-be0a-49f8e448d38d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.646348] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1159.715124] env[62109]: INFO nova.compute.manager [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Took 12.47 seconds to build instance. 
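The PowerOnVM_Task entries above (task-1117307 created, polled via _poll_task until "completed successfully", then "Powered on the VM") follow oslo.vmware's generic invoke-then-wait pattern. A minimal sketch of that pattern is below; the vCenter host and credentials are placeholders, and the constructor keyword names are quoted from memory rather than taken from this log, so treat it as illustrative only.

    # Illustrative sketch of the invoke_api()/wait_for_task() cycle visible in the
    # PowerOnVM_Task and _poll_task entries above. Host/credentials are placeholders.
    from oslo_vmware import api
    from oslo_vmware import vim_util

    session = api.VMwareAPISession(
        'vc.example.org',                  # vCenter host (placeholder)
        'administrator@vsphere.local',     # username (placeholder)
        'secret',                          # password (placeholder)
        api_retry_count=10,
        task_poll_interval=0.5)            # poll cadence, cf. the progress lines above

    # Look up a VM managed-object reference via the PropertyCollector
    # (the RetrievePropertiesEx calls logged above perform the same kind of lookup).
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 1)
    vm_ref = result.objects[0].obj

    # Start the asynchronous vCenter task, then block while oslo.vmware polls it;
    # wait_for_task() returns the task info on success and raises on error.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task_ref)
    print(task_info.state)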
[ 1160.217536] env[62109]: DEBUG oslo_concurrency.lockutils [None req-73ec0402-e8c0-4a03-b352-8799526d4537 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.991s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.217536] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 8.598s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.217856] env[62109]: INFO nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] During sync_power_state the instance has a pending task (spawning). Skip. [ 1160.217856] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.761788] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.762036] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.762259] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.762451] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.762661] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.764992] env[62109]: INFO nova.compute.manager [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Terminating instance [ 1160.766765] env[62109]: DEBUG nova.compute.manager [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1160.766970] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1160.767818] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94067099-a588-41bb-be0d-d9d63cc0b58a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.775792] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1160.776321] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cfb9836a-fff6-42f3-83ce-6339726c1d25 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.782161] env[62109]: DEBUG oslo_vmware.api [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1160.782161] env[62109]: value = "task-1117308" [ 1160.782161] env[62109]: _type = "Task" [ 1160.782161] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.790305] env[62109]: DEBUG oslo_vmware.api [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117308, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.161642] env[62109]: DEBUG oslo_vmware.rw_handles [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Completed reading data from the image iterator. {{(pid=62109) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1161.161951] env[62109]: DEBUG oslo_vmware.rw_handles [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f087c-c12e-2875-f33b-507e2e503055/disk-0.vmdk. 
{{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1161.162930] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5133ca52-184e-4cf5-acaf-5a3023e1412b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.170009] env[62109]: DEBUG oslo_vmware.rw_handles [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f087c-c12e-2875-f33b-507e2e503055/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1161.170216] env[62109]: DEBUG oslo_vmware.rw_handles [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f087c-c12e-2875-f33b-507e2e503055/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1161.170461] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-8a738ccf-e734-4b7e-ad9f-b1a0f56d9a7f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.292375] env[62109]: DEBUG oslo_vmware.api [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117308, 'name': PowerOffVM_Task, 'duration_secs': 0.264833} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.292728] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1161.292793] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1161.293084] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7fbefad-23e5-40f1-a436-43293c0375d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.408308] env[62109]: DEBUG oslo_vmware.rw_handles [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f087c-c12e-2875-f33b-507e2e503055/disk-0.vmdk. 
{{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1161.408541] env[62109]: INFO nova.virt.vmwareapi.images [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Downloaded image file data dcff6ca5-4589-4dff-8ead-c334a5a65018 [ 1161.409409] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf0fd3b-d03e-4d1d-bef7-0fb764786ea1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.426555] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-749252a7-81ae-4c4a-8aec-8d1bcc46d852 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.460489] env[62109]: INFO nova.virt.vmwareapi.images [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] The imported VM was unregistered [ 1161.463222] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Caching image {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1161.463488] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating directory with path [datastore1] devstack-image-cache_base/dcff6ca5-4589-4dff-8ead-c334a5a65018 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1161.463763] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17569cde-529c-4e0e-bca6-368de30332af {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.474992] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Created directory with path [datastore1] devstack-image-cache_base/dcff6ca5-4589-4dff-8ead-c334a5a65018 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1161.475205] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_c6aae7c8-aabe-4691-b620-d89255c60979/OSTACK_IMG_c6aae7c8-aabe-4691-b620-d89255c60979.vmdk to [datastore1] devstack-image-cache_base/dcff6ca5-4589-4dff-8ead-c334a5a65018/dcff6ca5-4589-4dff-8ead-c334a5a65018.vmdk. 
{{(pid=62109) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1161.475456] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-55be411c-4e2d-4d20-b40b-b187bbbad68e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.482168] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1161.482168] env[62109]: value = "task-1117311" [ 1161.482168] env[62109]: _type = "Task" [ 1161.482168] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.490597] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117311, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.646514] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.646760] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.646923] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.647108] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.647253] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1161.995809] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117311, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.495324] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117311, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.648090] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1162.648329] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1162.790561] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1162.790830] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1162.791106] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleting the datastore file [datastore1] 05158016-bd14-4a6b-b9d5-b8ebfb8063c1 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1162.791408] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-90d61a49-d5bf-4ad8-9f8d-cdd382f9c952 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.799978] env[62109]: DEBUG oslo_vmware.api [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1162.799978] env[62109]: value = "task-1117312" [ 1162.799978] env[62109]: _type = "Task" [ 1162.799978] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.808798] env[62109]: DEBUG oslo_vmware.api [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117312, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.993512] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117311, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.310844] env[62109]: DEBUG oslo_vmware.api [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117312, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.494388] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117311, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.811456] env[62109]: DEBUG oslo_vmware.api [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117312, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.868996} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.811456] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1163.811893] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1163.811930] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1163.812109] env[62109]: INFO nova.compute.manager [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Took 3.05 seconds to destroy the instance on the hypervisor. [ 1163.812355] env[62109]: DEBUG oslo.service.loopingcall [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1163.812553] env[62109]: DEBUG nova.compute.manager [-] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1163.812648] env[62109]: DEBUG nova.network.neutron [-] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1163.995954] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117311, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.174624} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.996220] env[62109]: INFO nova.virt.vmwareapi.ds_util [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_c6aae7c8-aabe-4691-b620-d89255c60979/OSTACK_IMG_c6aae7c8-aabe-4691-b620-d89255c60979.vmdk to [datastore1] devstack-image-cache_base/dcff6ca5-4589-4dff-8ead-c334a5a65018/dcff6ca5-4589-4dff-8ead-c334a5a65018.vmdk. [ 1163.996375] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Cleaning up location [datastore1] OSTACK_IMG_c6aae7c8-aabe-4691-b620-d89255c60979 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1163.999716] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_c6aae7c8-aabe-4691-b620-d89255c60979 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1163.999991] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f381e02-34eb-482c-a1b2-76b6b58321ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.009674] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1164.009674] env[62109]: value = "task-1117313" [ 1164.009674] env[62109]: _type = "Task" [ 1164.009674] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.018893] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117313, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.103684] env[62109]: DEBUG nova.compute.manager [req-c965ee3b-3e65-469a-b55e-2b2ec19696ad req-0239168e-b68e-4940-bd90-5aa3a10b5d8c service nova] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Received event network-vif-deleted-726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1164.103967] env[62109]: INFO nova.compute.manager [req-c965ee3b-3e65-469a-b55e-2b2ec19696ad req-0239168e-b68e-4940-bd90-5aa3a10b5d8c service nova] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Neutron deleted interface 726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0; detaching it from the instance and deleting it from the info cache [ 1164.104381] env[62109]: DEBUG nova.network.neutron [req-c965ee3b-3e65-469a-b55e-2b2ec19696ad req-0239168e-b68e-4940-bd90-5aa3a10b5d8c service nova] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.520344] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117313, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086726} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.520608] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1164.520816] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "[datastore1] devstack-image-cache_base/dcff6ca5-4589-4dff-8ead-c334a5a65018/dcff6ca5-4589-4dff-8ead-c334a5a65018.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1164.521104] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/dcff6ca5-4589-4dff-8ead-c334a5a65018/dcff6ca5-4589-4dff-8ead-c334a5a65018.vmdk to [datastore1] 1b3d7fa7-5428-460e-ab47-49c6d38f24a5/1b3d7fa7-5428-460e-ab47-49c6d38f24a5.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1164.521368] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74b02d06-595a-4c69-ba83-9192036bbf71 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.528009] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1164.528009] env[62109]: value = "task-1117314" [ 1164.528009] env[62109]: _type = "Task" [ 1164.528009] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.536358] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117314, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.580910] env[62109]: DEBUG nova.network.neutron [-] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.607186] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-313be1d1-266c-4c3d-98ba-2bb16a140094 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.617489] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd05c0f-4823-4035-90a0-6bbf716c4b26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.645312] env[62109]: DEBUG nova.compute.manager [req-c965ee3b-3e65-469a-b55e-2b2ec19696ad req-0239168e-b68e-4940-bd90-5aa3a10b5d8c service nova] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Detach interface failed, port_id=726fd50f-5e16-4db8-a3b5-2e3ec6cd47b0, reason: Instance 05158016-bd14-4a6b-b9d5-b8ebfb8063c1 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1165.039902] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117314, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.083756] env[62109]: INFO nova.compute.manager [-] [instance: 05158016-bd14-4a6b-b9d5-b8ebfb8063c1] Took 1.27 seconds to deallocate network for instance. [ 1165.164463] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1165.165197] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquired lock "refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.165197] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Forcefully refreshing network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1165.539617] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117314, 'name': CopyVirtualDisk_Task} progress is 46%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.591080] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.591388] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.591718] env[62109]: DEBUG nova.objects.instance [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lazy-loading 'resources' on Instance uuid 05158016-bd14-4a6b-b9d5-b8ebfb8063c1 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.039492] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117314, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.192163] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd79a762-cd69-46a1-a711-0813796453c1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.200773] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1273ddd-e8f6-4eb7-ba23-7c04d8b73bf6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.234960] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d216b329-6204-4c6a-b3ee-a57036f673c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.243818] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4faea67f-3473-4dcd-9a0b-fd2f49aa3843 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.259114] env[62109]: DEBUG nova.compute.provider_tree [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.437324] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updating instance_info_cache with network_info: [{"id": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "address": "fa:16:3e:53:c9:6c", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa04abfce-a9", "ovs_interfaceid": "a04abfce-a9e7-413a-94d6-d14ed8f205cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.539228] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117314, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.559125] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "c7a95d76-b143-45ce-87b3-de0b63e53169" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.559426] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.559592] env[62109]: INFO nova.compute.manager [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Shelving [ 1166.762280] env[62109]: DEBUG nova.scheduler.client.report [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1166.940349] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Releasing lock 
"refresh_cache-1b3d7fa7-5428-460e-ab47-49c6d38f24a5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1166.940560] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updated the network info_cache for instance {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1166.940868] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.042485] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117314, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.153171} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.042808] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/dcff6ca5-4589-4dff-8ead-c334a5a65018/dcff6ca5-4589-4dff-8ead-c334a5a65018.vmdk to [datastore1] 1b3d7fa7-5428-460e-ab47-49c6d38f24a5/1b3d7fa7-5428-460e-ab47-49c6d38f24a5.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1167.043882] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca3598b-1d6c-4e08-880f-c24c47eb935b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.065744] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 1b3d7fa7-5428-460e-ab47-49c6d38f24a5/1b3d7fa7-5428-460e-ab47-49c6d38f24a5.vmdk or device None with type streamOptimized {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1167.067848] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c807020-51ac-418d-991a-9c8accf0eb6f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.083837] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1167.084388] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3c21efa-e865-48c5-97c5-b976a1b1c501 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.090149] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 
tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1167.090149] env[62109]: value = "task-1117315" [ 1167.090149] env[62109]: _type = "Task" [ 1167.090149] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.094246] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1167.094246] env[62109]: value = "task-1117316" [ 1167.094246] env[62109]: _type = "Task" [ 1167.094246] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.100650] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117315, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.106670] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117316, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.267735] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.676s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.286206] env[62109]: INFO nova.scheduler.client.report [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted allocations for instance 05158016-bd14-4a6b-b9d5-b8ebfb8063c1 [ 1167.443805] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.444118] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1167.444348] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.444507] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1167.445447] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b636b0bf-aa39-4a8a-8974-5fc40980026f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.453797] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1beafa5d-0edc-40c8-8e68-8113129b539c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.467809] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0747ed-c041-4501-a89e-8b19701a1eb5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.474218] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5f14b7-44c1-424a-8e2c-b1adb27d27d4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.504014] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180418MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1167.504202] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.504364] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1167.602742] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117315, 'name': ReconfigVM_Task, 'duration_secs': 0.358844} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.603486] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 1b3d7fa7-5428-460e-ab47-49c6d38f24a5/1b3d7fa7-5428-460e-ab47-49c6d38f24a5.vmdk or device None with type streamOptimized {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1167.604784] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'disk_bus': None, 'encryption_format': None, 'device_type': 'disk', 'encryption_options': None, 'encryption_secret_uuid': None, 'device_name': '/dev/sda', 'guest_format': None, 'encrypted': False, 'boot_index': 0, 'image_id': '6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8'}], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244564', 'volume_id': '3d953331-6f18-4580-a50a-a728e86a4128', 'name': 'volume-3d953331-6f18-4580-a50a-a728e86a4128', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '1b3d7fa7-5428-460e-ab47-49c6d38f24a5', 'attached_at': '', 'detached_at': '', 'volume_id': '3d953331-6f18-4580-a50a-a728e86a4128', 'serial': '3d953331-6f18-4580-a50a-a728e86a4128'}, 'device_type': None, 'delete_on_termination': False, 'mount_device': '/dev/sdb', 'attachment_id': '98500752-f369-4888-aebb-aea12d6ebd61', 'guest_format': None, 'boot_index': None, 'volume_type': None}], 'swap': None} {{(pid=62109) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1167.605023] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Volume attach. 
Driver type: vmdk {{(pid=62109) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1167.605252] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244564', 'volume_id': '3d953331-6f18-4580-a50a-a728e86a4128', 'name': 'volume-3d953331-6f18-4580-a50a-a728e86a4128', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '1b3d7fa7-5428-460e-ab47-49c6d38f24a5', 'attached_at': '', 'detached_at': '', 'volume_id': '3d953331-6f18-4580-a50a-a728e86a4128', 'serial': '3d953331-6f18-4580-a50a-a728e86a4128'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1167.608669] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed212bbf-8f84-4103-a967-0c4e3f52f10c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.611099] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117316, 'name': PowerOffVM_Task, 'duration_secs': 0.215926} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.611340] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1167.612393] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e345ca8-1b5b-4292-8057-9e30b439fc0a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.625627] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc2efa5-4408-4ce0-8e86-9b3831d1d09b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.641156] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2213f5a-195a-4fc1-a9ad-38d9406cb03e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.662440] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] volume-3d953331-6f18-4580-a50a-a728e86a4128/volume-3d953331-6f18-4580-a50a-a728e86a4128.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1167.663612] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ed5afd1-e93d-4bc9-abe9-dea5785affb6 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.685078] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1167.685078] env[62109]: value = "task-1117317" [ 1167.685078] env[62109]: _type = "Task" [ 1167.685078] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.692853] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117317, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.797649] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9baea675-c1dc-48ff-a4bf-834dcf654379 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "05158016-bd14-4a6b-b9d5-b8ebfb8063c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.035s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.182035] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1168.182299] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-54e1ec79-1003-4b54-83eb-39116409068f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.190975] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1168.190975] env[62109]: value = "task-1117318" [ 1168.190975] env[62109]: _type = "Task" [ 1168.190975] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.196964] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.203331] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117318, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.530050] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.530313] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 8a64a700-e381-49a0-89ae-8a678ed7a4fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.530505] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance c7a95d76-b143-45ce-87b3-de0b63e53169 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.530690] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance fdc4486a-4837-4006-87c8-166cd5c41fcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.530869] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.531150] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1168.531363] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1168.603829] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0575b96-6227-4a60-90cd-a146e9e4a22d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.612488] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45ec183-121b-4bb7-b48d-7d40376d644c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.644866] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a0bb1e-1a18-4164-99ba-dee8018c850d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.652856] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fd176b-9be3-4d12-b193-6b52c0a85f56 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.666974] env[62109]: DEBUG nova.compute.provider_tree 
[None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1168.698058] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117317, 'name': ReconfigVM_Task, 'duration_secs': 0.746669} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.698058] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Reconfigured VM instance instance-00000066 to attach disk [datastore1] volume-3d953331-6f18-4580-a50a-a728e86a4128/volume-3d953331-6f18-4580-a50a-a728e86a4128.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1168.705549] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-056115a1-4704-4fe4-9203-c27c4fa9ca75 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.715135] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117318, 'name': CreateSnapshot_Task, 'duration_secs': 0.445593} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.715667] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1168.716723] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541c452a-69e5-4ba2-81b2-b97054d9199f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.721612] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1168.721612] env[62109]: value = "task-1117319" [ 1168.721612] env[62109]: _type = "Task" [ 1168.721612] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.735942] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117319, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.087709] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "364c7902-428f-4173-9a1d-934d1daf8dc3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.087946] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "364c7902-428f-4173-9a1d-934d1daf8dc3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1169.170448] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1169.236652] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1169.240327] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2118226e-609c-4f1d-8320-746b7eada377 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.243271] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117319, 'name': ReconfigVM_Task, 'duration_secs': 0.1428} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.243560] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244564', 'volume_id': '3d953331-6f18-4580-a50a-a728e86a4128', 'name': 'volume-3d953331-6f18-4580-a50a-a728e86a4128', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '1b3d7fa7-5428-460e-ab47-49c6d38f24a5', 'attached_at': '', 'detached_at': '', 'volume_id': '3d953331-6f18-4580-a50a-a728e86a4128', 'serial': '3d953331-6f18-4580-a50a-a728e86a4128'} {{(pid=62109) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1169.244427] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-66f28f76-c49a-4438-a9ae-60827805da78 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.250135] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1169.250135] env[62109]: value = "task-1117320" [ 1169.250135] env[62109]: _type = "Task" [ 1169.250135] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.251360] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1169.251360] env[62109]: value = "task-1117321" [ 1169.251360] env[62109]: _type = "Task" [ 1169.251360] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.264079] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117320, 'name': CloneVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.264274] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117321, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.590860] env[62109]: DEBUG nova.compute.manager [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1169.675763] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1169.675970] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.172s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1169.764572] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117320, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.767241] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117321, 'name': Rename_Task, 'duration_secs': 0.368878} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.767468] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1169.767705] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c46653e5-e018-48a2-94ff-2ca4e2d9ed03 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.774262] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1169.774262] env[62109]: value = "task-1117322" [ 1169.774262] env[62109]: _type = "Task" [ 1169.774262] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.781616] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117322, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.111045] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.111323] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.112767] env[62109]: INFO nova.compute.claims [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1170.262798] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117320, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.284909] env[62109]: DEBUG oslo_vmware.api [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117322, 'name': PowerOnVM_Task, 'duration_secs': 0.465637} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.285190] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1170.390134] env[62109]: DEBUG nova.compute.manager [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1170.391157] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c448d8-ad32-45e9-a448-15e3bd3a2b61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.763426] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117320, 'name': CloneVM_Task, 'duration_secs': 1.254072} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.763705] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Created linked-clone VM from snapshot [ 1170.764647] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a954db-a974-4b73-8d2b-6c496d55654b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.772037] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Uploading image 111e8adf-1386-4dec-af3b-fd9616cd90d5 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1170.792380] env[62109]: DEBUG oslo_vmware.rw_handles [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1170.792380] env[62109]: value = "vm-244569" [ 1170.792380] env[62109]: _type = "VirtualMachine" [ 1170.792380] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1170.792636] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-75442fbf-3a0f-4809-8337-01442deaad10 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.799578] env[62109]: DEBUG oslo_vmware.rw_handles [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lease: (returnval){ [ 1170.799578] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a99083-fd10-4e00-8e25-95dea1db3def" [ 1170.799578] env[62109]: _type = "HttpNfcLease" [ 1170.799578] env[62109]: } obtained for exporting VM: (result){ [ 1170.799578] env[62109]: value = "vm-244569" [ 1170.799578] env[62109]: _type = "VirtualMachine" [ 1170.799578] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1170.799867] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the lease: (returnval){ [ 1170.799867] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a99083-fd10-4e00-8e25-95dea1db3def" [ 1170.799867] env[62109]: _type = "HttpNfcLease" [ 1170.799867] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1170.806122] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1170.806122] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a99083-fd10-4e00-8e25-95dea1db3def" [ 1170.806122] env[62109]: _type = "HttpNfcLease" [ 1170.806122] env[62109]: } is initializing. 
{{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1170.910248] env[62109]: DEBUG oslo_concurrency.lockutils [None req-85e8881a-c2d5-4fcb-a083-ad244e6f3e34 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 30.082s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1170.911261] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 19.293s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.911504] env[62109]: INFO nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] During sync_power_state the instance has a pending task (spawning). Skip. [ 1170.911695] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.209250] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a0e2e9-9d9e-45b8-baaa-8b6503b8c7d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.217651] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1535699-357b-4d61-a71a-406cfbc4d8a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.247988] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd871e7-2fa6-4a66-93b2-7d57c57fb3e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.255984] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62d8b47-9a27-4ec8-bd65-49c1d8a20e10 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.268961] env[62109]: DEBUG nova.compute.provider_tree [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1171.308371] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1171.308371] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a99083-fd10-4e00-8e25-95dea1db3def" [ 1171.308371] env[62109]: _type = "HttpNfcLease" [ 1171.308371] env[62109]: } is ready. 
{{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1171.308648] env[62109]: DEBUG oslo_vmware.rw_handles [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1171.308648] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a99083-fd10-4e00-8e25-95dea1db3def" [ 1171.308648] env[62109]: _type = "HttpNfcLease" [ 1171.308648] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1171.309538] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9124a054-cf52-4d86-9c0e-22dfc8cef420 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.316455] env[62109]: DEBUG oslo_vmware.rw_handles [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5244f502-6495-45ac-32ab-f4a643cbdfcd/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1171.316630] env[62109]: DEBUG oslo_vmware.rw_handles [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5244f502-6495-45ac-32ab-f4a643cbdfcd/disk-0.vmdk for reading. {{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1171.403366] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-67738f6f-cbe4-4d1d-98f6-8ac336180436 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.772128] env[62109]: DEBUG nova.scheduler.client.report [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1172.277981] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.166s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1172.278618] env[62109]: DEBUG nova.compute.manager [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Start building networks asynchronously for 
instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1172.669589] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.783804] env[62109]: DEBUG nova.compute.utils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1172.785458] env[62109]: DEBUG nova.compute.manager [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1172.786373] env[62109]: DEBUG nova.network.neutron [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1172.828190] env[62109]: DEBUG nova.policy [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73cb2c252e3f4e68a2767b349e0917e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df642ff4b7f247d09f80b260ed9ef53f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1173.137058] env[62109]: DEBUG nova.network.neutron [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Successfully created port: 4614eaa6-23c7-403d-b647-39895157b961 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1173.291028] env[62109]: DEBUG nova.compute.manager [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1174.302024] env[62109]: DEBUG nova.compute.manager [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1174.327024] env[62109]: DEBUG nova.virt.hardware [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1174.327284] env[62109]: DEBUG nova.virt.hardware [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1174.327466] env[62109]: DEBUG nova.virt.hardware [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1174.327683] env[62109]: DEBUG nova.virt.hardware [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1174.327839] env[62109]: DEBUG nova.virt.hardware [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1174.328042] env[62109]: DEBUG nova.virt.hardware [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1174.328276] env[62109]: DEBUG nova.virt.hardware [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1174.328439] env[62109]: DEBUG nova.virt.hardware [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1174.328609] env[62109]: DEBUG nova.virt.hardware [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 
tempest-ServersTestJSON-632393265-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1174.328777] env[62109]: DEBUG nova.virt.hardware [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1174.328959] env[62109]: DEBUG nova.virt.hardware [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1174.329840] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845216b3-18e1-46fb-aad9-9ba99999c5a6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.338387] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b99ec3-30ee-4b9d-9380-85b5d64fc837 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.511519] env[62109]: DEBUG nova.compute.manager [req-e9188c20-498f-4dbc-ac17-46ec8c19a59a req-a090eddf-8de1-4fa6-8289-7779418ce7d7 service nova] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Received event network-vif-plugged-4614eaa6-23c7-403d-b647-39895157b961 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1174.511739] env[62109]: DEBUG oslo_concurrency.lockutils [req-e9188c20-498f-4dbc-ac17-46ec8c19a59a req-a090eddf-8de1-4fa6-8289-7779418ce7d7 service nova] Acquiring lock "364c7902-428f-4173-9a1d-934d1daf8dc3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.511960] env[62109]: DEBUG oslo_concurrency.lockutils [req-e9188c20-498f-4dbc-ac17-46ec8c19a59a req-a090eddf-8de1-4fa6-8289-7779418ce7d7 service nova] Lock "364c7902-428f-4173-9a1d-934d1daf8dc3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.512149] env[62109]: DEBUG oslo_concurrency.lockutils [req-e9188c20-498f-4dbc-ac17-46ec8c19a59a req-a090eddf-8de1-4fa6-8289-7779418ce7d7 service nova] Lock "364c7902-428f-4173-9a1d-934d1daf8dc3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.512378] env[62109]: DEBUG nova.compute.manager [req-e9188c20-498f-4dbc-ac17-46ec8c19a59a req-a090eddf-8de1-4fa6-8289-7779418ce7d7 service nova] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] No waiting events found dispatching network-vif-plugged-4614eaa6-23c7-403d-b647-39895157b961 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1174.512594] env[62109]: WARNING nova.compute.manager [req-e9188c20-498f-4dbc-ac17-46ec8c19a59a req-a090eddf-8de1-4fa6-8289-7779418ce7d7 service nova] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Received 
unexpected event network-vif-plugged-4614eaa6-23c7-403d-b647-39895157b961 for instance with vm_state building and task_state spawning. [ 1175.067084] env[62109]: DEBUG nova.network.neutron [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Successfully updated port: 4614eaa6-23c7-403d-b647-39895157b961 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1175.090615] env[62109]: DEBUG nova.compute.manager [req-26fda42f-103d-4373-881b-846c736d8ade req-e0826dbc-863e-45c3-a492-2cd3d941aa5c service nova] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Received event network-changed-4614eaa6-23c7-403d-b647-39895157b961 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1175.090615] env[62109]: DEBUG nova.compute.manager [req-26fda42f-103d-4373-881b-846c736d8ade req-e0826dbc-863e-45c3-a492-2cd3d941aa5c service nova] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Refreshing instance network info cache due to event network-changed-4614eaa6-23c7-403d-b647-39895157b961. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1175.090615] env[62109]: DEBUG oslo_concurrency.lockutils [req-26fda42f-103d-4373-881b-846c736d8ade req-e0826dbc-863e-45c3-a492-2cd3d941aa5c service nova] Acquiring lock "refresh_cache-364c7902-428f-4173-9a1d-934d1daf8dc3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1175.091070] env[62109]: DEBUG oslo_concurrency.lockutils [req-26fda42f-103d-4373-881b-846c736d8ade req-e0826dbc-863e-45c3-a492-2cd3d941aa5c service nova] Acquired lock "refresh_cache-364c7902-428f-4173-9a1d-934d1daf8dc3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.091070] env[62109]: DEBUG nova.network.neutron [req-26fda42f-103d-4373-881b-846c736d8ade req-e0826dbc-863e-45c3-a492-2cd3d941aa5c service nova] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Refreshing network info cache for port 4614eaa6-23c7-403d-b647-39895157b961 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1175.569639] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "refresh_cache-364c7902-428f-4173-9a1d-934d1daf8dc3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1175.623842] env[62109]: DEBUG nova.network.neutron [req-26fda42f-103d-4373-881b-846c736d8ade req-e0826dbc-863e-45c3-a492-2cd3d941aa5c service nova] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1175.698307] env[62109]: DEBUG nova.network.neutron [req-26fda42f-103d-4373-881b-846c736d8ade req-e0826dbc-863e-45c3-a492-2cd3d941aa5c service nova] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.201065] env[62109]: DEBUG oslo_concurrency.lockutils [req-26fda42f-103d-4373-881b-846c736d8ade req-e0826dbc-863e-45c3-a492-2cd3d941aa5c service nova] Releasing lock "refresh_cache-364c7902-428f-4173-9a1d-934d1daf8dc3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1176.202054] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "refresh_cache-364c7902-428f-4173-9a1d-934d1daf8dc3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.202054] env[62109]: DEBUG nova.network.neutron [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1176.733087] env[62109]: DEBUG nova.network.neutron [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1176.854960] env[62109]: DEBUG nova.network.neutron [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Updating instance_info_cache with network_info: [{"id": "4614eaa6-23c7-403d-b647-39895157b961", "address": "fa:16:3e:cf:a3:21", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4614eaa6-23", "ovs_interfaceid": "4614eaa6-23c7-403d-b647-39895157b961", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.357476] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 
tempest-ServersTestJSON-632393265-project-member] Releasing lock "refresh_cache-364c7902-428f-4173-9a1d-934d1daf8dc3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1177.357749] env[62109]: DEBUG nova.compute.manager [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Instance network_info: |[{"id": "4614eaa6-23c7-403d-b647-39895157b961", "address": "fa:16:3e:cf:a3:21", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4614eaa6-23", "ovs_interfaceid": "4614eaa6-23c7-403d-b647-39895157b961", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1177.358232] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:a3:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbe1725d-6711-4e92-9a4e-d4802651e7d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4614eaa6-23c7-403d-b647-39895157b961', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1177.365800] env[62109]: DEBUG oslo.service.loopingcall [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1177.366054] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1177.366299] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0cd2787b-d48d-4062-9ea9-59d0c375783d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.388695] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1177.388695] env[62109]: value = "task-1117324" [ 1177.388695] env[62109]: _type = "Task" [ 1177.388695] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.396672] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117324, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.900400] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117324, 'name': CreateVM_Task, 'duration_secs': 0.337698} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.900683] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1177.901597] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1177.901835] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.902270] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1177.902603] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-898ef87e-d4e4-4f8d-82c7-95a538559bab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.908585] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1177.908585] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52133c10-b12c-9877-7923-b1776e508e6e" [ 1177.908585] env[62109]: _type = "Task" [ 1177.908585] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.917956] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52133c10-b12c-9877-7923-b1776e508e6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.418867] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52133c10-b12c-9877-7923-b1776e508e6e, 'name': SearchDatastore_Task, 'duration_secs': 0.012086} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.419230] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.419471] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1178.419718] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.419869] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.420068] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1178.420339] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58cd74e3-a238-493f-9ca8-eafc5827fc4e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.429050] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1178.429241] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1178.429972] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71030eb3-8ac4-45cc-9d8e-f51fdc884b5e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.435354] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1178.435354] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b79f21-458c-d1d8-acb4-e0581ef2b9a7" [ 1178.435354] env[62109]: _type = "Task" [ 1178.435354] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.442774] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b79f21-458c-d1d8-acb4-e0581ef2b9a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.946684] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b79f21-458c-d1d8-acb4-e0581ef2b9a7, 'name': SearchDatastore_Task, 'duration_secs': 0.00875} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.947572] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42f86309-4dbb-4350-9104-590df8315885 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.953400] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1178.953400] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e6a1c1-4ae8-58ce-4443-5b8f4d66ca71" [ 1178.953400] env[62109]: _type = "Task" [ 1178.953400] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.961452] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e6a1c1-4ae8-58ce-4443-5b8f4d66ca71, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.093535] env[62109]: DEBUG nova.compute.manager [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Stashing vm_state: active {{(pid=62109) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1179.464177] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e6a1c1-4ae8-58ce-4443-5b8f4d66ca71, 'name': SearchDatastore_Task, 'duration_secs': 0.011599} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.464472] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1179.464733] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 364c7902-428f-4173-9a1d-934d1daf8dc3/364c7902-428f-4173-9a1d-934d1daf8dc3.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1179.465013] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2203f08-a5da-454c-ba72-89126d693724 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.471407] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1179.471407] env[62109]: value = "task-1117325" [ 1179.471407] env[62109]: _type = "Task" [ 1179.471407] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.478594] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117325, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.615715] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.615994] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.982471] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117325, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.120513] env[62109]: INFO nova.compute.claims [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1180.482979] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.67685} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.484034] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 364c7902-428f-4173-9a1d-934d1daf8dc3/364c7902-428f-4173-9a1d-934d1daf8dc3.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1180.484034] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1180.484221] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6fbaa52a-b5f0-43c2-9d31-976af9bb08d1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.491145] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1180.491145] env[62109]: value = "task-1117326" [ 1180.491145] env[62109]: _type = "Task" [ 1180.491145] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.499763] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117326, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.626887] env[62109]: INFO nova.compute.resource_tracker [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating resource usage from migration 084cd1ff-21dd-45d3-887b-de34c1622573 [ 1180.718766] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deda84c2-866e-4321-8f09-04cf31be31bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.726267] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb8197a-fc66-480b-9d46-0d82ead86136 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.757535] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9eb1cd-5f53-4ee7-9dbd-55c65b723e8f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.765479] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e898570-3732-468c-992f-28342d0ba90c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.779388] env[62109]: DEBUG nova.compute.provider_tree [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.000837] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117326, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077514} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.001213] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1181.001823] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6be0544-d1c9-4522-8f6e-2ffcd1fafece {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.023473] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 364c7902-428f-4173-9a1d-934d1daf8dc3/364c7902-428f-4173-9a1d-934d1daf8dc3.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1181.023830] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44f52bd0-506a-4d5e-83e8-cb1762b7f22d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.043805] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1181.043805] env[62109]: value = "task-1117327" [ 1181.043805] env[62109]: _type = "Task" [ 1181.043805] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.051822] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117327, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.093462] env[62109]: DEBUG oslo_vmware.rw_handles [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5244f502-6495-45ac-32ab-f4a643cbdfcd/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1181.094394] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacb9641-825f-4213-ace7-68f6294fa148 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.100098] env[62109]: DEBUG oslo_vmware.rw_handles [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5244f502-6495-45ac-32ab-f4a643cbdfcd/disk-0.vmdk is in state: ready. 
{{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1181.100283] env[62109]: ERROR oslo_vmware.rw_handles [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5244f502-6495-45ac-32ab-f4a643cbdfcd/disk-0.vmdk due to incomplete transfer. [ 1181.100495] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a88f742a-54c5-4187-8d37-811a91641f02 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.106652] env[62109]: DEBUG oslo_vmware.rw_handles [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5244f502-6495-45ac-32ab-f4a643cbdfcd/disk-0.vmdk. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1181.106865] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Uploaded image 111e8adf-1386-4dec-af3b-fd9616cd90d5 to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1181.109762] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1181.110010] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9b3a3e68-3742-4529-85ba-2bd10a8852fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.114932] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1181.114932] env[62109]: value = "task-1117328" [ 1181.114932] env[62109]: _type = "Task" [ 1181.114932] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.122252] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117328, 'name': Destroy_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.282911] env[62109]: DEBUG nova.scheduler.client.report [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1181.554124] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117327, 'name': ReconfigVM_Task, 'duration_secs': 0.352723} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.554409] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 364c7902-428f-4173-9a1d-934d1daf8dc3/364c7902-428f-4173-9a1d-934d1daf8dc3.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1181.555047] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f0bf97f3-9b1d-4dcd-85e1-d3b059401744 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.561094] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1181.561094] env[62109]: value = "task-1117329" [ 1181.561094] env[62109]: _type = "Task" [ 1181.561094] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.568869] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117329, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.625079] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117328, 'name': Destroy_Task, 'duration_secs': 0.393627} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.625359] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Destroyed the VM [ 1181.625620] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1181.625893] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-36a62e8d-8d84-4cd5-a98b-1bc9db23baf4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.632189] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1181.632189] env[62109]: value = "task-1117330" [ 1181.632189] env[62109]: _type = "Task" [ 1181.632189] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.639743] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117330, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.788252] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.172s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.788466] env[62109]: INFO nova.compute.manager [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Migrating [ 1182.071745] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117329, 'name': Rename_Task, 'duration_secs': 0.13674} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.072126] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1182.072227] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0bb72161-5180-465f-a541-904356a28813 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.078961] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1182.078961] env[62109]: value = "task-1117331" [ 1182.078961] env[62109]: _type = "Task" [ 1182.078961] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.085963] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117331, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.141968] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117330, 'name': RemoveSnapshot_Task, 'duration_secs': 0.342402} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.142276] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1182.142560] env[62109]: DEBUG nova.compute.manager [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1182.143356] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac6498c-baee-453b-9a59-6803fcf74270 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.304537] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1182.304757] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.304964] env[62109]: DEBUG nova.network.neutron [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1182.589189] env[62109]: DEBUG oslo_vmware.api [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117331, 'name': PowerOnVM_Task, 'duration_secs': 0.439427} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.589469] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1182.589672] env[62109]: INFO nova.compute.manager [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Took 8.29 seconds to spawn the instance on the hypervisor. 
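The spawn sequence recorded above for instance 364c7902-428f-4173-9a1d-934d1daf8dc3 (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven by one oslo.vmware pattern: invoke a *_Task method through the session, then poll the returned task object until vCenter reports it done, which is what the repeated "Task: {'id': task-..., ...} progress is N%" lines show. A minimal sketch of that pattern follows, assuming a reachable vCenter; the host, credentials, retry/poll values and datastore paths are placeholders, not values taken from this run.

from oslo_vmware import api

# Session setup comparable to what the driver does at startup; the retry
# count and poll interval here are illustrative, not this deployment's values.
session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Copy a cached image VMDK into the instance directory, the same kind of call
# that nova.virt.vmwareapi.vm_util.copy_virtual_disk issues for task-1117325.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
    destName='[datastore1] <instance-uuid>/<instance-uuid>.vmdk')

# wait_for_task() performs the polling seen in the "progress is N%" records
# and raises if vCenter marks the task as failed.
session.wait_for_task(task)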
[ 1182.589854] env[62109]: DEBUG nova.compute.manager [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1182.590622] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a33838-24e4-4cd2-867c-5cccaa33100c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.655950] env[62109]: INFO nova.compute.manager [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Shelve offloading [ 1182.657960] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1182.658219] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe7d32c1-741c-4af8-bbbe-a027ebc90882 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.665867] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1182.665867] env[62109]: value = "task-1117332" [ 1182.665867] env[62109]: _type = "Task" [ 1182.665867] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.673607] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117332, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.105941] env[62109]: INFO nova.compute.manager [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Took 13.01 seconds to build instance. 
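The inventory reported for provider 574e9717-c25e-453d-8028-45d9e2f95398 is what the resize and build claims in this section are checked against; placement derives capacity as (total - reserved) * allocation_ratio, with max_unit additionally capping any single allocation. A small self-contained check of that arithmetic, using only the figures from the report itself:

# Capacity implied by the inventory logged for provider
# 574e9717-c25e-453d-8028-45d9e2f95398 (values copied from the report above).
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 170,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Placement's usage check: used + requested <= (total - reserved) * allocation_ratio.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:.0f}, per-allocation cap={inv['max_unit']}")

# Expected output:
# VCPU: capacity=192, per-allocation cap=16
# MEMORY_MB: capacity=196078, per-allocation cap=65530
# DISK_GB: capacity=400, per-allocation cap=170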
[ 1183.176582] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1183.176849] env[62109]: DEBUG nova.compute.manager [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1183.177722] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33850244-6871-4323-857c-07e41c0b12c1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.183924] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1183.184048] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.184276] env[62109]: DEBUG nova.network.neutron [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1183.225331] env[62109]: DEBUG nova.network.neutron [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance_info_cache with network_info: [{"id": "8a4912e9-48e5-4762-aad9-050359873623", "address": "fa:16:3e:1b:58:93", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a4912e9-48", "ovs_interfaceid": "8a4912e9-48e5-4762-aad9-050359873623", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.607850] env[62109]: DEBUG oslo_concurrency.lockutils [None req-243c9d2a-065a-4a34-ac99-766c7723f0d2 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "364c7902-428f-4173-9a1d-934d1daf8dc3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.520s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.728013] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1183.824890] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "13b24cd7-f0ae-4679-9f9d-5a914e1a2970" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1183.825111] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "13b24cd7-f0ae-4679-9f9d-5a914e1a2970" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.889501] env[62109]: DEBUG nova.network.neutron [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updating instance_info_cache with network_info: [{"id": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "address": "fa:16:3e:2e:75:e5", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc190fa8c-8d", "ovs_interfaceid": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.330051] env[62109]: DEBUG nova.compute.manager [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1184.395184] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1184.642941] env[62109]: DEBUG nova.compute.manager [req-a7037079-f191-4271-8eb7-29c64b21d025 req-a681ec10-0fa0-4283-83cc-94c77df3bca0 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Received event network-vif-unplugged-c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1184.643189] env[62109]: DEBUG oslo_concurrency.lockutils [req-a7037079-f191-4271-8eb7-29c64b21d025 req-a681ec10-0fa0-4283-83cc-94c77df3bca0 service nova] Acquiring lock "c7a95d76-b143-45ce-87b3-de0b63e53169-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.643413] env[62109]: DEBUG oslo_concurrency.lockutils [req-a7037079-f191-4271-8eb7-29c64b21d025 req-a681ec10-0fa0-4283-83cc-94c77df3bca0 service nova] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.643586] env[62109]: DEBUG oslo_concurrency.lockutils [req-a7037079-f191-4271-8eb7-29c64b21d025 req-a681ec10-0fa0-4283-83cc-94c77df3bca0 service nova] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.643760] env[62109]: DEBUG nova.compute.manager [req-a7037079-f191-4271-8eb7-29c64b21d025 req-a681ec10-0fa0-4283-83cc-94c77df3bca0 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] No waiting events found dispatching network-vif-unplugged-c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1184.643939] env[62109]: WARNING nova.compute.manager [req-a7037079-f191-4271-8eb7-29c64b21d025 req-a681ec10-0fa0-4283-83cc-94c77df3bca0 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Received unexpected event network-vif-unplugged-c190fa8c-8d46-43de-943d-554e47a2fe5c for instance with vm_state shelved and task_state shelving_offloading. 
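The instance_info_cache entries logged above for ports 8a4912e9-48e5-4762-aad9-050359873623 and c190fa8c-8d46-43de-943d-554e47a2fe5c carry the fields the surrounding records refer to: fixed and floating addresses, the OVS device name and the bound port ID named in the network-vif-unplugged event. In Nova these are nova.network.model objects, but the serialized form shown in the log reads as plain nested dicts; the following sketch pulls those fields from a trimmed copy of the first entry (sample data only, reduced to the keys used here).

# Trimmed copy of the cached VIF for port 8a4912e9-48e5-4762-aad9-050359873623
# as logged by update_instance_cache_with_nw_info above.
vif = {
    "id": "8a4912e9-48e5-4762-aad9-050359873623",
    "address": "fa:16:3e:1b:58:93",
    "devname": "tap8a4912e9-48",
    "type": "ovs",
    "active": True,
    "network": {
        "id": "d3eaeb90-2414-45c1-9732-4ec852512f2f",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{
                "address": "192.168.128.11",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.189"}],
            }],
        }],
    },
}

# Collect fixed and floating addresses across all subnets of the VIF.
fixed = [ip["address"]
         for subnet in vif["network"]["subnets"]
         for ip in subnet["ips"]]
floating = [fip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            for fip in ip.get("floating_ips", [])]

print(vif["id"], vif["devname"], fixed, floating)
# 8a4912e9-48e5-4762-aad9-050359873623 tap8a4912e9-48 ['192.168.128.11'] ['10.180.180.189']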
[ 1184.857884] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.858217] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.859722] env[62109]: INFO nova.compute.claims [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1184.962938] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1184.963933] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d25666-bb8b-4457-9640-e594ef824c5d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.972767] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1184.973034] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10ec0b28-16d8-4b23-94e7-870c26db2aae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.043375] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1185.043734] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1185.043928] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleting the datastore file [datastore2] c7a95d76-b143-45ce-87b3-de0b63e53169 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1185.044252] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d81aa814-14ac-464c-b794-43596538c796 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.053971] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1185.053971] env[62109]: value = "task-1117334" [ 1185.053971] env[62109]: _type = "Task" [ 1185.053971] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.063070] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117334, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.244161] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0c6181-5692-4615-8b1a-c4892230dff0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.265052] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance 'fdc4486a-4837-4006-87c8-166cd5c41fcd' progress to 0 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1185.566218] env[62109]: DEBUG oslo_vmware.api [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117334, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217819} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.566578] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1185.566649] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1185.566780] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1185.583904] env[62109]: INFO nova.scheduler.client.report [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleted allocations for instance c7a95d76-b143-45ce-87b3-de0b63e53169 [ 1185.771746] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1185.772082] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c36489d-5691-4baa-b0bb-7eb621262760 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.781422] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1185.781422] env[62109]: value = "task-1117335" [ 1185.781422] env[62109]: _type = "Task" [ 1185.781422] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.790398] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117335, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.965345] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421fa182-8356-40aa-aa1d-4bfa724f130e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.972489] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8946db-273e-4aa6-93d0-4da199c43844 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.001850] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d34b626-88bd-41c8-9f4c-c11da2391110 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.008882] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e766317-dae9-4f38-9740-d04bc7cb8dc7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.022404] env[62109]: DEBUG nova.compute.provider_tree [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1186.088571] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.292422] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117335, 'name': PowerOffVM_Task, 'duration_secs': 0.176168} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.292718] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1186.292910] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance 'fdc4486a-4837-4006-87c8-166cd5c41fcd' progress to 17 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1186.525886] env[62109]: DEBUG nova.scheduler.client.report [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1186.675151] env[62109]: DEBUG nova.compute.manager [req-9268e70a-3726-4595-b2ed-3e614f812bb1 req-e4d8b558-3d41-4409-bfab-afe0e360edea service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Received event network-changed-c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1186.675419] env[62109]: DEBUG nova.compute.manager [req-9268e70a-3726-4595-b2ed-3e614f812bb1 req-e4d8b558-3d41-4409-bfab-afe0e360edea service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Refreshing instance network info cache due to event network-changed-c190fa8c-8d46-43de-943d-554e47a2fe5c. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1186.675659] env[62109]: DEBUG oslo_concurrency.lockutils [req-9268e70a-3726-4595-b2ed-3e614f812bb1 req-e4d8b558-3d41-4409-bfab-afe0e360edea service nova] Acquiring lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1186.675835] env[62109]: DEBUG oslo_concurrency.lockutils [req-9268e70a-3726-4595-b2ed-3e614f812bb1 req-e4d8b558-3d41-4409-bfab-afe0e360edea service nova] Acquired lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.676063] env[62109]: DEBUG nova.network.neutron [req-9268e70a-3726-4595-b2ed-3e614f812bb1 req-e4d8b558-3d41-4409-bfab-afe0e360edea service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Refreshing network info cache for port c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1186.800344] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1186.800739] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1186.801060] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1186.801378] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1186.801640] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1186.802095] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1186.802294] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1186.802564] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1186.802840] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1186.803056] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1186.803254] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1186.808891] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-092dd3a8-af5f-496b-8a4a-d84c3f85acf2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.825834] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1186.825834] env[62109]: value = "task-1117336" [ 1186.825834] env[62109]: _type = "Task" [ 1186.825834] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.834240] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117336, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.031423] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.172s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.031423] env[62109]: DEBUG nova.compute.manager [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1187.033266] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.945s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.033427] env[62109]: DEBUG nova.objects.instance [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'resources' on Instance uuid c7a95d76-b143-45ce-87b3-de0b63e53169 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.335902] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117336, 'name': ReconfigVM_Task, 'duration_secs': 0.274938} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.336154] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance 'fdc4486a-4837-4006-87c8-166cd5c41fcd' progress to 33 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1187.382590] env[62109]: DEBUG nova.network.neutron [req-9268e70a-3726-4595-b2ed-3e614f812bb1 req-e4d8b558-3d41-4409-bfab-afe0e360edea service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updated VIF entry in instance network info cache for port c190fa8c-8d46-43de-943d-554e47a2fe5c. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1187.383265] env[62109]: DEBUG nova.network.neutron [req-9268e70a-3726-4595-b2ed-3e614f812bb1 req-e4d8b558-3d41-4409-bfab-afe0e360edea service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updating instance_info_cache with network_info: [{"id": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "address": "fa:16:3e:2e:75:e5", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": null, "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapc190fa8c-8d", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.536289] env[62109]: DEBUG nova.compute.utils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1187.537897] env[62109]: DEBUG nova.objects.instance [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'numa_topology' on Instance uuid c7a95d76-b143-45ce-87b3-de0b63e53169 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.539205] env[62109]: DEBUG nova.compute.manager [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1187.539349] env[62109]: DEBUG nova.network.neutron [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1187.577302] env[62109]: DEBUG nova.policy [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73cb2c252e3f4e68a2767b349e0917e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df642ff4b7f247d09f80b260ed9ef53f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1187.812311] env[62109]: DEBUG nova.network.neutron [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Successfully created port: a8a6a51f-10bc-4c16-bcad-4cddb9938e8e {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1187.842726] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1187.842985] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1187.843162] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1187.843352] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1187.843502] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image 
pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1187.843717] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1187.843889] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1187.844070] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1187.844244] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1187.844411] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1187.844589] env[62109]: DEBUG nova.virt.hardware [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1187.850074] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Reconfiguring VM instance instance-0000006c to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1187.850380] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c631b5b5-d378-4f71-9396-1010d38f1f26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.869895] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1187.869895] env[62109]: value = "task-1117337" [ 1187.869895] env[62109]: _type = "Task" [ 1187.869895] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.878137] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117337, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.890895] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "c7a95d76-b143-45ce-87b3-de0b63e53169" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1187.891502] env[62109]: DEBUG oslo_concurrency.lockutils [req-9268e70a-3726-4595-b2ed-3e614f812bb1 req-e4d8b558-3d41-4409-bfab-afe0e360edea service nova] Releasing lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.040167] env[62109]: DEBUG nova.compute.manager [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1188.042974] env[62109]: DEBUG nova.objects.base [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1188.147041] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab20c289-af23-471a-8a3c-1ba21574308a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.154740] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f311a18-4a91-4de2-9eef-014fbc80f96b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.186884] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8878402-8ffe-402d-9402-20e6f0e5cf57 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.194328] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfec5b1-f7a6-4d71-a838-c683aebc17a1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.207184] env[62109]: DEBUG nova.compute.provider_tree [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1188.380615] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 
tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117337, 'name': ReconfigVM_Task, 'duration_secs': 0.155431} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.381973] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Reconfigured VM instance instance-0000006c to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1188.381973] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2c1508-5635-43ac-b6c2-eb550cf94c66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.404934] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] fdc4486a-4837-4006-87c8-166cd5c41fcd/fdc4486a-4837-4006-87c8-166cd5c41fcd.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.404934] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf65d370-3877-4ede-b340-10213273a3cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.423129] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1188.423129] env[62109]: value = "task-1117338" [ 1188.423129] env[62109]: _type = "Task" [ 1188.423129] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.430800] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117338, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.711055] env[62109]: DEBUG nova.scheduler.client.report [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1188.935756] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117338, 'name': ReconfigVM_Task, 'duration_secs': 0.273} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.936120] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Reconfigured VM instance instance-0000006c to attach disk [datastore1] fdc4486a-4837-4006-87c8-166cd5c41fcd/fdc4486a-4837-4006-87c8-166cd5c41fcd.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1188.936297] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance 'fdc4486a-4837-4006-87c8-166cd5c41fcd' progress to 50 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1189.054221] env[62109]: DEBUG nova.compute.manager [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1189.081513] env[62109]: DEBUG nova.virt.hardware [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1189.081752] env[62109]: DEBUG nova.virt.hardware [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1189.081936] env[62109]: DEBUG nova.virt.hardware [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1189.082154] env[62109]: DEBUG nova.virt.hardware [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1189.082310] env[62109]: DEBUG nova.virt.hardware [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1189.082460] env[62109]: DEBUG nova.virt.hardware [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1189.082670] env[62109]: DEBUG nova.virt.hardware [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1189.082831] env[62109]: DEBUG nova.virt.hardware [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1189.083007] env[62109]: DEBUG nova.virt.hardware [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 
tempest-ServersTestJSON-632393265-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1189.083199] env[62109]: DEBUG nova.virt.hardware [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1189.083411] env[62109]: DEBUG nova.virt.hardware [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1189.084317] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44b5db4-d952-48ba-86d0-eb1822e30ef2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.092157] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500f3aa2-2218-4367-b3e8-8be4e16441ad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.216644] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.183s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.258533] env[62109]: DEBUG nova.compute.manager [req-cb8ed2a4-4feb-4c7f-a9ba-e4ecfb541888 req-544243d5-8c2d-412e-a391-1d110f1b29ed service nova] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Received event network-vif-plugged-a8a6a51f-10bc-4c16-bcad-4cddb9938e8e {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1189.258822] env[62109]: DEBUG oslo_concurrency.lockutils [req-cb8ed2a4-4feb-4c7f-a9ba-e4ecfb541888 req-544243d5-8c2d-412e-a391-1d110f1b29ed service nova] Acquiring lock "13b24cd7-f0ae-4679-9f9d-5a914e1a2970-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1189.259064] env[62109]: DEBUG oslo_concurrency.lockutils [req-cb8ed2a4-4feb-4c7f-a9ba-e4ecfb541888 req-544243d5-8c2d-412e-a391-1d110f1b29ed service nova] Lock "13b24cd7-f0ae-4679-9f9d-5a914e1a2970-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.259246] env[62109]: DEBUG oslo_concurrency.lockutils [req-cb8ed2a4-4feb-4c7f-a9ba-e4ecfb541888 req-544243d5-8c2d-412e-a391-1d110f1b29ed service nova] Lock "13b24cd7-f0ae-4679-9f9d-5a914e1a2970-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.259415] env[62109]: DEBUG nova.compute.manager [req-cb8ed2a4-4feb-4c7f-a9ba-e4ecfb541888 req-544243d5-8c2d-412e-a391-1d110f1b29ed service nova] 
[instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] No waiting events found dispatching network-vif-plugged-a8a6a51f-10bc-4c16-bcad-4cddb9938e8e {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1189.259587] env[62109]: WARNING nova.compute.manager [req-cb8ed2a4-4feb-4c7f-a9ba-e4ecfb541888 req-544243d5-8c2d-412e-a391-1d110f1b29ed service nova] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Received unexpected event network-vif-plugged-a8a6a51f-10bc-4c16-bcad-4cddb9938e8e for instance with vm_state building and task_state spawning. [ 1189.378345] env[62109]: DEBUG nova.network.neutron [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Successfully updated port: a8a6a51f-10bc-4c16-bcad-4cddb9938e8e {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1189.443241] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b8ae69-2529-45a2-92a3-81828ac9101d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.463566] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e71548e-48f3-4dbc-aaa9-a47614235b0d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.480875] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance 'fdc4486a-4837-4006-87c8-166cd5c41fcd' progress to 67 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1189.725168] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cd816527-bacf-41fe-8b8d-f1ffd3efc101 tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.166s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.726591] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.835s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.726591] env[62109]: INFO nova.compute.manager [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Unshelving [ 1189.883016] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "refresh_cache-13b24cd7-f0ae-4679-9f9d-5a914e1a2970" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1189.883267] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "refresh_cache-13b24cd7-f0ae-4679-9f9d-5a914e1a2970" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.883404] env[62109]: DEBUG nova.network.neutron [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1190.020138] env[62109]: DEBUG nova.network.neutron [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Port 8a4912e9-48e5-4762-aad9-050359873623 binding to destination host cpu-1 is already ACTIVE {{(pid=62109) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1190.415096] env[62109]: DEBUG nova.network.neutron [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1190.591175] env[62109]: DEBUG nova.network.neutron [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Updating instance_info_cache with network_info: [{"id": "a8a6a51f-10bc-4c16-bcad-4cddb9938e8e", "address": "fa:16:3e:63:bf:53", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8a6a51f-10", "ovs_interfaceid": "a8a6a51f-10bc-4c16-bcad-4cddb9938e8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.752431] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.752715] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 
tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.752921] env[62109]: DEBUG nova.objects.instance [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'pci_requests' on Instance uuid c7a95d76-b143-45ce-87b3-de0b63e53169 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.041929] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "fdc4486a-4837-4006-87c8-166cd5c41fcd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1191.041929] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "fdc4486a-4837-4006-87c8-166cd5c41fcd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1191.042368] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "fdc4486a-4837-4006-87c8-166cd5c41fcd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.093905] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "refresh_cache-13b24cd7-f0ae-4679-9f9d-5a914e1a2970" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1191.094217] env[62109]: DEBUG nova.compute.manager [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Instance network_info: |[{"id": "a8a6a51f-10bc-4c16-bcad-4cddb9938e8e", "address": "fa:16:3e:63:bf:53", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8a6a51f-10", "ovs_interfaceid": "a8a6a51f-10bc-4c16-bcad-4cddb9938e8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1191.094628] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:bf:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbe1725d-6711-4e92-9a4e-d4802651e7d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8a6a51f-10bc-4c16-bcad-4cddb9938e8e', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1191.102296] env[62109]: DEBUG oslo.service.loopingcall [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1191.102500] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1191.102719] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d233d4b2-389c-4ebd-a67c-d955d531cea4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.123910] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1191.123910] env[62109]: value = "task-1117339" [ 1191.123910] env[62109]: _type = "Task" [ 1191.123910] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.131445] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117339, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.256859] env[62109]: DEBUG nova.objects.instance [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'numa_topology' on Instance uuid c7a95d76-b143-45ce-87b3-de0b63e53169 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.284499] env[62109]: DEBUG nova.compute.manager [req-60f79df6-c3eb-4653-8fdb-eeaab99e793e req-05c56d47-30f0-42fb-9855-71fce2d2b827 service nova] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Received event network-changed-a8a6a51f-10bc-4c16-bcad-4cddb9938e8e {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1191.284693] env[62109]: DEBUG nova.compute.manager [req-60f79df6-c3eb-4653-8fdb-eeaab99e793e req-05c56d47-30f0-42fb-9855-71fce2d2b827 service nova] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Refreshing instance network info cache due to event network-changed-a8a6a51f-10bc-4c16-bcad-4cddb9938e8e. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1191.284910] env[62109]: DEBUG oslo_concurrency.lockutils [req-60f79df6-c3eb-4653-8fdb-eeaab99e793e req-05c56d47-30f0-42fb-9855-71fce2d2b827 service nova] Acquiring lock "refresh_cache-13b24cd7-f0ae-4679-9f9d-5a914e1a2970" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1191.285070] env[62109]: DEBUG oslo_concurrency.lockutils [req-60f79df6-c3eb-4653-8fdb-eeaab99e793e req-05c56d47-30f0-42fb-9855-71fce2d2b827 service nova] Acquired lock "refresh_cache-13b24cd7-f0ae-4679-9f9d-5a914e1a2970" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.285236] env[62109]: DEBUG nova.network.neutron [req-60f79df6-c3eb-4653-8fdb-eeaab99e793e req-05c56d47-30f0-42fb-9855-71fce2d2b827 service nova] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Refreshing network info cache for port a8a6a51f-10bc-4c16-bcad-4cddb9938e8e {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1191.633783] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117339, 'name': CreateVM_Task, 'duration_secs': 0.370705} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.633966] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1191.634657] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1191.634833] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.635183] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1191.635441] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a90ba30-a198-4c1e-8082-3d427c10670a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.639834] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1191.639834] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528fe0ed-6f4c-589d-38d6-a70c1677d7bf" [ 1191.639834] env[62109]: _type = "Task" [ 1191.639834] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.648517] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528fe0ed-6f4c-589d-38d6-a70c1677d7bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.759940] env[62109]: INFO nova.compute.claims [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1192.018081] env[62109]: DEBUG nova.network.neutron [req-60f79df6-c3eb-4653-8fdb-eeaab99e793e req-05c56d47-30f0-42fb-9855-71fce2d2b827 service nova] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Updated VIF entry in instance network info cache for port a8a6a51f-10bc-4c16-bcad-4cddb9938e8e. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1192.018510] env[62109]: DEBUG nova.network.neutron [req-60f79df6-c3eb-4653-8fdb-eeaab99e793e req-05c56d47-30f0-42fb-9855-71fce2d2b827 service nova] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Updating instance_info_cache with network_info: [{"id": "a8a6a51f-10bc-4c16-bcad-4cddb9938e8e", "address": "fa:16:3e:63:bf:53", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8a6a51f-10", "ovs_interfaceid": "a8a6a51f-10bc-4c16-bcad-4cddb9938e8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.075484] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.075774] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.075858] env[62109]: DEBUG nova.network.neutron 
[None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1192.149873] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]528fe0ed-6f4c-589d-38d6-a70c1677d7bf, 'name': SearchDatastore_Task, 'duration_secs': 0.014799} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.150130] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.150365] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1192.150599] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.150745] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.150949] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1192.151225] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9076b63f-2209-47a7-b2b6-2d9949544487 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.158408] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1192.158581] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1192.159277] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c33223b-c406-4feb-9d8f-5bd8a716443c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.163673] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1192.163673] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a76812-2b7d-0da6-dbce-5cd9836094d4" [ 1192.163673] env[62109]: _type = "Task" [ 1192.163673] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.171008] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a76812-2b7d-0da6-dbce-5cd9836094d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.521698] env[62109]: DEBUG oslo_concurrency.lockutils [req-60f79df6-c3eb-4653-8fdb-eeaab99e793e req-05c56d47-30f0-42fb-9855-71fce2d2b827 service nova] Releasing lock "refresh_cache-13b24cd7-f0ae-4679-9f9d-5a914e1a2970" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.674288] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52a76812-2b7d-0da6-dbce-5cd9836094d4, 'name': SearchDatastore_Task, 'duration_secs': 0.008717} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.676943] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd8fe0f0-2e45-403b-844d-a90a4908e10d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.683171] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1192.683171] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52494afa-a9bf-6ae5-7a91-fac096edd80b" [ 1192.683171] env[62109]: _type = "Task" [ 1192.683171] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.691105] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52494afa-a9bf-6ae5-7a91-fac096edd80b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.833872] env[62109]: DEBUG nova.network.neutron [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance_info_cache with network_info: [{"id": "8a4912e9-48e5-4762-aad9-050359873623", "address": "fa:16:3e:1b:58:93", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a4912e9-48", "ovs_interfaceid": "8a4912e9-48e5-4762-aad9-050359873623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.868147] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85f4bc5-9de3-4ba3-8ddb-dcda136e8839 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.876220] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c24407-0db2-4c49-b06a-fcbdee9a3b59 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.907782] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23dd4718-2a33-4ca3-98e7-3d6ad60622a7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.915292] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db151a87-a5a5-4fdf-8a48-0f6f41586d2b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.928438] env[62109]: DEBUG nova.compute.provider_tree [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1193.194185] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52494afa-a9bf-6ae5-7a91-fac096edd80b, 'name': SearchDatastore_Task, 'duration_secs': 0.009828} 
completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.194465] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1193.194717] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 13b24cd7-f0ae-4679-9f9d-5a914e1a2970/13b24cd7-f0ae-4679-9f9d-5a914e1a2970.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1193.194980] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1fe04c12-a280-45b9-a0b9-d655e416b256 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.201860] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1193.201860] env[62109]: value = "task-1117340" [ 1193.201860] env[62109]: _type = "Task" [ 1193.201860] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.336748] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1193.436065] env[62109]: DEBUG nova.scheduler.client.report [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1193.712315] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117340, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.858337] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117560a0-ff1d-4f30-b70e-c66201e73b26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.876852] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48030dc-b82b-444f-9cfb-358f5110ccca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.884019] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance 'fdc4486a-4837-4006-87c8-166cd5c41fcd' progress to 83 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1193.941059] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.187s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.966547] env[62109]: INFO nova.network.neutron [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updating port c190fa8c-8d46-43de-943d-554e47a2fe5c with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1194.212841] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117340, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.389840] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1194.390210] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f346d9f-5de8-466a-9bdc-41ac41a9edda {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.397520] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1194.397520] env[62109]: value = "task-1117341" [ 1194.397520] env[62109]: _type = "Task" [ 1194.397520] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.406601] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117341, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.713910] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117340, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.50238} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.714339] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 13b24cd7-f0ae-4679-9f9d-5a914e1a2970/13b24cd7-f0ae-4679-9f9d-5a914e1a2970.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1194.714569] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1194.714832] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-23099923-a887-41a8-8fcf-15a60c406a47 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.721368] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1194.721368] env[62109]: value = "task-1117342" [ 1194.721368] env[62109]: _type = "Task" [ 1194.721368] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.728741] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117342, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.907240] env[62109]: DEBUG oslo_vmware.api [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117341, 'name': PowerOnVM_Task, 'duration_secs': 0.374257} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.907516] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1194.907709] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-b6beaccc-ed95-456c-963b-636c35b70660 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance 'fdc4486a-4837-4006-87c8-166cd5c41fcd' progress to 100 {{(pid=62109) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1195.231422] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117342, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067443} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.231784] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1195.232557] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5323933-d18a-40f0-82c9-d568bb42b0da {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.254315] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 13b24cd7-f0ae-4679-9f9d-5a914e1a2970/13b24cd7-f0ae-4679-9f9d-5a914e1a2970.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1195.254571] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-196c31b6-30f2-4fc1-b7fb-903d4d266924 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.272926] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1195.272926] env[62109]: value = "task-1117343" [ 1195.272926] env[62109]: _type = "Task" [ 1195.272926] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.280332] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117343, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.459043] env[62109]: DEBUG nova.compute.manager [req-9d82df1c-1a88-4493-97d4-73b14826973e req-de39c77b-1a06-4653-819a-43402c73f8a8 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Received event network-vif-plugged-c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1195.459341] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d82df1c-1a88-4493-97d4-73b14826973e req-de39c77b-1a06-4653-819a-43402c73f8a8 service nova] Acquiring lock "c7a95d76-b143-45ce-87b3-de0b63e53169-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.459572] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d82df1c-1a88-4493-97d4-73b14826973e req-de39c77b-1a06-4653-819a-43402c73f8a8 service nova] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.459744] env[62109]: DEBUG oslo_concurrency.lockutils [req-9d82df1c-1a88-4493-97d4-73b14826973e req-de39c77b-1a06-4653-819a-43402c73f8a8 service nova] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.459942] env[62109]: DEBUG nova.compute.manager [req-9d82df1c-1a88-4493-97d4-73b14826973e req-de39c77b-1a06-4653-819a-43402c73f8a8 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] No waiting events found dispatching network-vif-plugged-c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1195.460274] env[62109]: WARNING nova.compute.manager [req-9d82df1c-1a88-4493-97d4-73b14826973e req-de39c77b-1a06-4653-819a-43402c73f8a8 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Received unexpected event network-vif-plugged-c190fa8c-8d46-43de-943d-554e47a2fe5c for instance with vm_state shelved_offloaded and task_state spawning. 
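The CopyVirtualDisk_Task, ExtendVirtualDisk_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: a vSphere *_Task method is invoked through the API session, and the caller then blocks in wait_for_task() while _poll_task logs "progress is N%" until the task completes. A minimal sketch of that pattern, for orientation only — the host, credentials, poll settings and the "vm-123" managed-object id below are illustrative assumptions, not values taken from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Illustrative connection values; real deployments take these from nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical managed-object reference for the target VM.
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

    # Start the asynchronous vSphere task, then poll it to completion.
    # wait_for_task() is what produces the "_poll_task ... progress is N%"
    # lines seen above and raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
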
[ 1195.550619] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1195.550844] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.551113] env[62109]: DEBUG nova.network.neutron [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1195.783452] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117343, 'name': ReconfigVM_Task, 'duration_secs': 0.280153} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.783749] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 13b24cd7-f0ae-4679-9f9d-5a914e1a2970/13b24cd7-f0ae-4679-9f9d-5a914e1a2970.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1195.784413] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51c684e3-9e09-4f8b-835f-e9a2b2117de6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.791369] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1195.791369] env[62109]: value = "task-1117344" [ 1195.791369] env[62109]: _type = "Task" [ 1195.791369] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.798571] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117344, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.302164] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117344, 'name': Rename_Task, 'duration_secs': 0.155143} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.303104] env[62109]: DEBUG nova.network.neutron [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updating instance_info_cache with network_info: [{"id": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "address": "fa:16:3e:2e:75:e5", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc190fa8c-8d", "ovs_interfaceid": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.304159] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1196.304560] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d421680-c5a7-48c5-a3c0-493f42f6ef09 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.311974] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1196.311974] env[62109]: value = "task-1117345" [ 1196.311974] env[62109]: _type = "Task" [ 1196.311974] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.323705] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117345, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.805829] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.821420] env[62109]: DEBUG oslo_vmware.api [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117345, 'name': PowerOnVM_Task, 'duration_secs': 0.423941} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.821616] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1196.821823] env[62109]: INFO nova.compute.manager [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Took 7.77 seconds to spawn the instance on the hypervisor. [ 1196.822053] env[62109]: DEBUG nova.compute.manager [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1196.822915] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a9ec16-cb1e-44a2-839d-798b27d9999c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.839885] env[62109]: DEBUG nova.virt.hardware [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='217ecf5ede8ad20005556edbe654a049',container_format='bare',created_at=2024-10-03T08:02:35Z,direct_url=,disk_format='vmdk',id=111e8adf-1386-4dec-af3b-fd9616cd90d5,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-759699528-shelved',owner='a363548894df47d5981199004e9884de',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2024-10-03T08:02:50Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1196.840159] env[62109]: DEBUG nova.virt.hardware [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1196.840327] env[62109]: DEBUG 
nova.virt.hardware [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1196.840513] env[62109]: DEBUG nova.virt.hardware [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1196.840663] env[62109]: DEBUG nova.virt.hardware [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1196.840814] env[62109]: DEBUG nova.virt.hardware [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1196.841067] env[62109]: DEBUG nova.virt.hardware [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1196.841254] env[62109]: DEBUG nova.virt.hardware [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1196.841426] env[62109]: DEBUG nova.virt.hardware [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1196.841597] env[62109]: DEBUG nova.virt.hardware [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1196.841775] env[62109]: DEBUG nova.virt.hardware [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1196.842689] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55be1c4d-9084-4328-a2b2-3f487898767e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.850357] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0862b0c-4600-44d1-aee2-bfc98bc272bc {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.864544] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:75:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb224918-e294-4b76-80f9-2fa0031b7dc2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c190fa8c-8d46-43de-943d-554e47a2fe5c', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1196.872272] env[62109]: DEBUG oslo.service.loopingcall [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1196.875819] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1196.876506] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-faccdcfb-acd9-4641-8a36-067c7332e919 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.897602] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1196.897602] env[62109]: value = "task-1117346" [ 1196.897602] env[62109]: _type = "Task" [ 1196.897602] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.905553] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117346, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.946505] env[62109]: DEBUG nova.network.neutron [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Port 8a4912e9-48e5-4762-aad9-050359873623 binding to destination host cpu-1 is already ACTIVE {{(pid=62109) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1196.946696] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.946852] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.947034] env[62109]: DEBUG nova.network.neutron [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1197.341961] env[62109]: INFO nova.compute.manager [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Took 12.51 seconds to build instance. [ 1197.407266] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117346, 'name': CreateVM_Task, 'duration_secs': 0.39411} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.407434] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1197.408109] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/111e8adf-1386-4dec-af3b-fd9616cd90d5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.408282] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "[datastore2] devstack-image-cache_base/111e8adf-1386-4dec-af3b-fd9616cd90d5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.408654] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/111e8adf-1386-4dec-af3b-fd9616cd90d5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1197.408913] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b365f95-d21c-46be-a748-1e2fda0f5c83 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.413172] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1197.413172] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52aa619c-d31a-f640-f3a8-197dfddb1117" [ 1197.413172] env[62109]: _type = "Task" [ 1197.413172] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.422464] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52aa619c-d31a-f640-f3a8-197dfddb1117, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.488757] env[62109]: DEBUG nova.compute.manager [req-d99adf38-7d7c-4b78-a50f-006256b4fb6d req-f000fcfc-7f61-44cf-86df-fa29eaf5ea13 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Received event network-changed-c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1197.489110] env[62109]: DEBUG nova.compute.manager [req-d99adf38-7d7c-4b78-a50f-006256b4fb6d req-f000fcfc-7f61-44cf-86df-fa29eaf5ea13 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Refreshing instance network info cache due to event network-changed-c190fa8c-8d46-43de-943d-554e47a2fe5c. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1197.489462] env[62109]: DEBUG oslo_concurrency.lockutils [req-d99adf38-7d7c-4b78-a50f-006256b4fb6d req-f000fcfc-7f61-44cf-86df-fa29eaf5ea13 service nova] Acquiring lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.489735] env[62109]: DEBUG oslo_concurrency.lockutils [req-d99adf38-7d7c-4b78-a50f-006256b4fb6d req-f000fcfc-7f61-44cf-86df-fa29eaf5ea13 service nova] Acquired lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.490036] env[62109]: DEBUG nova.network.neutron [req-d99adf38-7d7c-4b78-a50f-006256b4fb6d req-f000fcfc-7f61-44cf-86df-fa29eaf5ea13 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Refreshing network info cache for port c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1197.780310] env[62109]: DEBUG nova.network.neutron [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance_info_cache with network_info: [{"id": "8a4912e9-48e5-4762-aad9-050359873623", "address": "fa:16:3e:1b:58:93", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a4912e9-48", "ovs_interfaceid": "8a4912e9-48e5-4762-aad9-050359873623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.844480] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dcce1bc6-91e5-45aa-8610-fcf245047386 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "13b24cd7-f0ae-4679-9f9d-5a914e1a2970" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.019s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.924612] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "[datastore2] devstack-image-cache_base/111e8adf-1386-4dec-af3b-fd9616cd90d5" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1197.924928] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Processing image 111e8adf-1386-4dec-af3b-fd9616cd90d5 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1197.925190] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/111e8adf-1386-4dec-af3b-fd9616cd90d5/111e8adf-1386-4dec-af3b-fd9616cd90d5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.925347] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquired lock "[datastore2] devstack-image-cache_base/111e8adf-1386-4dec-af3b-fd9616cd90d5/111e8adf-1386-4dec-af3b-fd9616cd90d5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.925606] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1197.925871] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-abdfb468-da24-4808-b421-e4bce1dc9de7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.944336] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1197.944528] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1197.945272] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df6baa59-3e45-44ea-bb47-2f1e1f194620 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.950285] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1197.950285] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52cb36a9-26b1-8efb-28aa-c19d39108754" [ 1197.950285] env[62109]: _type = "Task" [ 1197.950285] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.958351] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52cb36a9-26b1-8efb-28aa-c19d39108754, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.233079] env[62109]: DEBUG nova.network.neutron [req-d99adf38-7d7c-4b78-a50f-006256b4fb6d req-f000fcfc-7f61-44cf-86df-fa29eaf5ea13 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updated VIF entry in instance network info cache for port c190fa8c-8d46-43de-943d-554e47a2fe5c. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1198.233494] env[62109]: DEBUG nova.network.neutron [req-d99adf38-7d7c-4b78-a50f-006256b4fb6d req-f000fcfc-7f61-44cf-86df-fa29eaf5ea13 service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updating instance_info_cache with network_info: [{"id": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "address": "fa:16:3e:2e:75:e5", "network": {"id": "d94d8212-82e2-48ee-886c-cc17952af453", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-43902406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a363548894df47d5981199004e9884de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc190fa8c-8d", "ovs_interfaceid": "c190fa8c-8d46-43de-943d-554e47a2fe5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.282732] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.408646] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "13b24cd7-f0ae-4679-9f9d-5a914e1a2970" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.409035] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock 
"13b24cd7-f0ae-4679-9f9d-5a914e1a2970" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.409742] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "13b24cd7-f0ae-4679-9f9d-5a914e1a2970-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.409972] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "13b24cd7-f0ae-4679-9f9d-5a914e1a2970-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.410176] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "13b24cd7-f0ae-4679-9f9d-5a914e1a2970-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.412467] env[62109]: INFO nova.compute.manager [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Terminating instance [ 1198.414422] env[62109]: DEBUG nova.compute.manager [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1198.414637] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1198.415471] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085e3921-ffa8-4423-adc1-82d98f590ece {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.423058] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1198.423300] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc532e31-a537-4beb-a9c3-320a8e27db59 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.430382] env[62109]: DEBUG oslo_vmware.api [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1198.430382] env[62109]: value = "task-1117347" [ 1198.430382] env[62109]: _type = "Task" [ 1198.430382] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.437780] env[62109]: DEBUG oslo_vmware.api [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117347, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.460401] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Preparing fetch location {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1198.460725] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Fetch image to [datastore2] OSTACK_IMG_5698ece7-6e4b-4784-a6d2-1c81f0432a4d/OSTACK_IMG_5698ece7-6e4b-4784-a6d2-1c81f0432a4d.vmdk {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1198.460945] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Downloading stream optimized image 111e8adf-1386-4dec-af3b-fd9616cd90d5 to [datastore2] OSTACK_IMG_5698ece7-6e4b-4784-a6d2-1c81f0432a4d/OSTACK_IMG_5698ece7-6e4b-4784-a6d2-1c81f0432a4d.vmdk on the data store datastore2 as vApp {{(pid=62109) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1198.461177] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Downloading image file data 111e8adf-1386-4dec-af3b-fd9616cd90d5 to the ESX as VM named 'OSTACK_IMG_5698ece7-6e4b-4784-a6d2-1c81f0432a4d' {{(pid=62109) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1198.540176] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1198.540176] env[62109]: value = "resgroup-9" [ 1198.540176] env[62109]: _type = "ResourcePool" [ 1198.540176] env[62109]: }. {{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1198.540736] env[62109]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-156ebf84-52b8-4071-b329-27426a416b88 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.562991] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lease: (returnval){ [ 1198.562991] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f9230d-303b-b8dd-1978-b362e5c92381" [ 1198.562991] env[62109]: _type = "HttpNfcLease" [ 1198.562991] env[62109]: } obtained for vApp import into resource pool (val){ [ 1198.562991] env[62109]: value = "resgroup-9" [ 1198.562991] env[62109]: _type = "ResourcePool" [ 1198.562991] env[62109]: }. 
{{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1198.563378] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the lease: (returnval){ [ 1198.563378] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f9230d-303b-b8dd-1978-b362e5c92381" [ 1198.563378] env[62109]: _type = "HttpNfcLease" [ 1198.563378] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1198.569953] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1198.569953] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f9230d-303b-b8dd-1978-b362e5c92381" [ 1198.569953] env[62109]: _type = "HttpNfcLease" [ 1198.569953] env[62109]: } is initializing. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1198.736282] env[62109]: DEBUG oslo_concurrency.lockutils [req-d99adf38-7d7c-4b78-a50f-006256b4fb6d req-f000fcfc-7f61-44cf-86df-fa29eaf5ea13 service nova] Releasing lock "refresh_cache-c7a95d76-b143-45ce-87b3-de0b63e53169" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.785824] env[62109]: DEBUG nova.compute.manager [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62109) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:897}} [ 1198.786131] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.786411] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.941986] env[62109]: DEBUG oslo_vmware.api [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117347, 'name': PowerOffVM_Task, 'duration_secs': 0.220258} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.942347] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1198.942548] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1198.942835] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfdf4c73-e320-463d-97fe-e0fb50f3e260 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.071730] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1199.071730] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f9230d-303b-b8dd-1978-b362e5c92381" [ 1199.071730] env[62109]: _type = "HttpNfcLease" [ 1199.071730] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1199.072059] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1199.072059] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f9230d-303b-b8dd-1978-b362e5c92381" [ 1199.072059] env[62109]: _type = "HttpNfcLease" [ 1199.072059] env[62109]: }. {{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1199.072774] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a56a92-1dbf-490d-959f-8f94962139c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.079806] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c3aa9-7f34-5272-cef4-db506fa9cf29/disk-0.vmdk from lease info. {{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1199.079988] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c3aa9-7f34-5272-cef4-db506fa9cf29/disk-0.vmdk. 
{{(pid=62109) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1199.145189] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-78879604-2313-47cc-a57a-a5eb596fdba3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.259269] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1199.259527] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1199.259718] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleting the datastore file [datastore1] 13b24cd7-f0ae-4679-9f9d-5a914e1a2970 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1199.259989] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e224746a-72ac-48f9-814a-ef924ecc5625 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.266246] env[62109]: DEBUG oslo_vmware.api [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1199.266246] env[62109]: value = "task-1117350" [ 1199.266246] env[62109]: _type = "Task" [ 1199.266246] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.273883] env[62109]: DEBUG oslo_vmware.api [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117350, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.289470] env[62109]: DEBUG nova.objects.instance [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lazy-loading 'migration_context' on Instance uuid fdc4486a-4837-4006-87c8-166cd5c41fcd {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.814810] env[62109]: DEBUG oslo_vmware.api [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117350, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146577} completed successfully. 
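[editor's note] The write handle above reports the VMDK URL it obtained from the lease and the exact byte count it will stream (31667712), and later entries call HttpNfcLeaseProgress while data is written. This is a rough sketch of that "upload in fixed-size chunks, report percent complete" pattern; the reader, writer, and progress callables are placeholders rather than the real rw_handles objects.

import io

def stream_image(read_chunk, write_chunk, total_size, report_progress,
                 chunk_size=64 * 1024):
    """Copy total_size bytes in chunks, reporting integer percent progress."""
    written = 0
    last_percent = -1
    while written < total_size:
        data = read_chunk(min(chunk_size, total_size - written))
        if not data:
            raise IOError("image iterator ended early at %d bytes" % written)
        write_chunk(data)
        written += len(data)
        percent = written * 100 // total_size
        if percent != last_percent:   # mirror the periodic lease progress updates
            report_progress(percent)
            last_percent = percent
    return written

# Toy usage with in-memory buffers standing in for Glance and the NFC URL.
src, dst = io.BytesIO(b"x" * 1000), io.BytesIO()
stream_image(src.read, dst.write, 1000, lambda p: None, chunk_size=256)
assert dst.getvalue() == b"x" * 1000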
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.815965] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1199.815965] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1199.816774] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1199.816774] env[62109]: INFO nova.compute.manager [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Took 1.40 seconds to destroy the instance on the hypervisor. [ 1199.816774] env[62109]: DEBUG oslo.service.loopingcall [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1199.816945] env[62109]: DEBUG nova.compute.manager [-] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1199.817425] env[62109]: DEBUG nova.network.neutron [-] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1199.928992] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb558b7-ae7c-456f-ab0c-8233356a52cc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.940937] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904839f8-5090-4092-ad8a-a317b409d8ab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.986963] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf5d2a7-f3a5-4a26-81bd-f344f184d98f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.995676] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31d8d43-60ba-4b4c-aeb4-028f5b677db6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.014916] env[62109]: DEBUG nova.compute.provider_tree [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] 
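[editor's note] The PowerOffVM_Task and DeleteDatastoreFile_Task entries follow the same shape: submit a vCenter task, poll its progress, then log the duration once it completes successfully. A minimal stand-in for that wait-for-task loop is sketched below, assuming a poll() callable that returns a small status dict; it is a simplification of, not a copy of, oslo.vmware's task polling.

import time

def wait_for_task(poll, poll_interval=0.5):
    """Poll a task until it reports success or error; return its duration."""
    started = time.monotonic()
    while True:
        info = poll()  # e.g. {'state': 'running', 'progress': 43}
        if info['state'] == 'success':
            return time.monotonic() - started
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        time.sleep(poll_interval)

# Toy usage: a task that finishes on its third poll.
_polls = iter([{'state': 'running', 'progress': 0},
               {'state': 'running', 'progress': 50},
               {'state': 'success', 'progress': 100}])
duration_secs = wait_for_task(lambda: next(_polls), poll_interval=0.01)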
Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1200.096357] env[62109]: DEBUG nova.compute.manager [req-1e8c09d7-af33-4c89-9273-0a157695216f req-c453475f-c929-4bb3-8772-e66dd8c9a325 service nova] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Received event network-vif-deleted-a8a6a51f-10bc-4c16-bcad-4cddb9938e8e {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1200.096587] env[62109]: INFO nova.compute.manager [req-1e8c09d7-af33-4c89-9273-0a157695216f req-c453475f-c929-4bb3-8772-e66dd8c9a325 service nova] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Neutron deleted interface a8a6a51f-10bc-4c16-bcad-4cddb9938e8e; detaching it from the instance and deleting it from the info cache [ 1200.096765] env[62109]: DEBUG nova.network.neutron [req-1e8c09d7-af33-4c89-9273-0a157695216f req-c453475f-c929-4bb3-8772-e66dd8c9a325 service nova] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.266982] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Completed reading data from the image iterator. {{(pid=62109) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1200.267330] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c3aa9-7f34-5272-cef4-db506fa9cf29/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1200.268407] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a28ee7d-e61d-4283-bcad-559479213f70 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.276170] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c3aa9-7f34-5272-cef4-db506fa9cf29/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1200.276418] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c3aa9-7f34-5272-cef4-db506fa9cf29/disk-0.vmdk. 
{{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1200.276721] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-83822fd5-7ac3-4c07-995f-aba07c2feb69 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.485213] env[62109]: DEBUG oslo_vmware.rw_handles [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c3aa9-7f34-5272-cef4-db506fa9cf29/disk-0.vmdk. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1200.485499] env[62109]: INFO nova.virt.vmwareapi.images [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Downloaded image file data 111e8adf-1386-4dec-af3b-fd9616cd90d5 [ 1200.487071] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d1364a-bb3a-4499-b31c-03a299d52d19 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.505652] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3da8de40-a629-43e4-8ff9-a70ce9791895 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.521510] env[62109]: DEBUG nova.scheduler.client.report [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1200.528541] env[62109]: INFO nova.virt.vmwareapi.images [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] The imported VM was unregistered [ 1200.531076] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Caching image {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1200.531312] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Creating directory with path [datastore2] devstack-image-cache_base/111e8adf-1386-4dec-af3b-fd9616cd90d5 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1200.531568] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory 
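[editor's note] The scheduler report client above logs the full inventory it compared for provider 574e9717-c25e-453d-8028-45d9e2f95398 (VCPU, MEMORY_MB, DISK_GB records with reserved, max_unit, step_size, and allocation_ratio fields) and concludes nothing changed, so no update is sent to placement. A small sketch of that comparison, using the exact structure shown in the log; only the helper name is invented.

def diff_inventory(current, desired):
    """Return the resource classes whose inventory records differ."""
    classes = set(current) | set(desired)
    return sorted(rc for rc in classes if current.get(rc) != desired.get(rc))

current = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170,
                'step_size': 1, 'allocation_ratio': 1.0},
}
reported = {rc: dict(fields) for rc, fields in current.items()}
# Identical views, so "Inventory has not changed" and no PUT is needed.
assert diff_inventory(current, reported) == []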
with opID=oslo.vmware-acbc2b70-1dfa-4c0c-b9b1-763380b8d7e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.541409] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Created directory with path [datastore2] devstack-image-cache_base/111e8adf-1386-4dec-af3b-fd9616cd90d5 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1200.541625] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_5698ece7-6e4b-4784-a6d2-1c81f0432a4d/OSTACK_IMG_5698ece7-6e4b-4784-a6d2-1c81f0432a4d.vmdk to [datastore2] devstack-image-cache_base/111e8adf-1386-4dec-af3b-fd9616cd90d5/111e8adf-1386-4dec-af3b-fd9616cd90d5.vmdk. {{(pid=62109) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1200.541893] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-268492dd-7f61-4d07-91ec-d07e2a3f2973 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.548912] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1200.548912] env[62109]: value = "task-1117352" [ 1200.548912] env[62109]: _type = "Task" [ 1200.548912] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.557522] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117352, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.565947] env[62109]: DEBUG nova.network.neutron [-] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.599651] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-510be665-1fe4-4d08-a181-687bd8557381 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.609640] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8939a219-615e-495b-8b6c-adef9f3c302a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.636356] env[62109]: DEBUG nova.compute.manager [req-1e8c09d7-af33-4c89-9273-0a157695216f req-c453475f-c929-4bb3-8772-e66dd8c9a325 service nova] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Detach interface failed, port_id=a8a6a51f-10bc-4c16-bcad-4cddb9938e8e, reason: Instance 13b24cd7-f0ae-4679-9f9d-5a914e1a2970 could not be found. 
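[editor's note] The image is first imported as a throwaway OSTACK_IMG_* VM, then its disk is moved into devstack-image-cache_base/<image-id>/<image-id>.vmdk so that later boots of the same image can copy from the cache instead of re-downloading. The sketch below only shows how those datastore paths are composed, using the datastore and IDs from the entries above; the helpers are illustrative and are not nova's ds_util.

def ds_path(datastore, *parts):
    """Render a datastore path in the '[datastore] a/b/c' form seen in the logs."""
    return "[%s] %s" % (datastore, "/".join(parts))

def cached_image_vmdk(datastore, image_id, cache_dir="devstack-image-cache_base"):
    return ds_path(datastore, cache_dir, image_id, "%s.vmdk" % image_id)

def instance_root_vmdk(datastore, instance_uuid):
    return ds_path(datastore, instance_uuid, "%s.vmdk" % instance_uuid)

image = "111e8adf-1386-4dec-af3b-fd9616cd90d5"
instance = "c7a95d76-b143-45ce-87b3-de0b63e53169"
src = cached_image_vmdk("datastore2", image)
dst = instance_root_vmdk("datastore2", instance)
# MoveVirtualDisk_Task populates src once; CopyVirtualDisk_Task later copies
# src -> dst for each instance booted from the cached image.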
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1201.066105] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117352, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.068801] env[62109]: INFO nova.compute.manager [-] [instance: 13b24cd7-f0ae-4679-9f9d-5a914e1a2970] Took 1.25 seconds to deallocate network for instance. [ 1201.535353] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.748s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.560695] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117352, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.577045] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.577343] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.577577] env[62109]: DEBUG nova.objects.instance [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lazy-loading 'resources' on Instance uuid 13b24cd7-f0ae-4679-9f9d-5a914e1a2970 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1202.061094] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117352, 'name': MoveVirtualDisk_Task} progress is 63%. 
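[editor's note] lockutils logs how long each caller waited to acquire "compute_resources" and how long it held it (2.748s above) while the resource tracker drops the move claim. Below is a minimal re-creation of that accounting around a plain threading.Lock; the real decorator lives in oslo.concurrency, and the message format is only approximated here.

import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name):
    """Acquire a named lock, reporting wait and hold times like lockutils does."""
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
    try:
        yield
    finally:
        held = time.monotonic() - t0 - waited
        lock.release()
        print('Lock "%s" released :: held %.3fs' % (name, held))

with timed_lock("compute_resources"):
    pass  # resource tracker work would happen here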
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.206449] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c2a0d3-30c6-4795-8ff5-98c2c87805a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.215057] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9005753-bf22-4aa1-816e-fec6697920d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.246079] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc82bbc-1fd2-4721-98bd-7e13924137d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.254626] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba520bae-17d5-40fd-b623-0391df6eed1b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.270284] env[62109]: DEBUG nova.compute.provider_tree [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1202.564250] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117352, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.774116] env[62109]: DEBUG nova.scheduler.client.report [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1203.064960] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117352, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.367567} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.065463] env[62109]: INFO nova.virt.vmwareapi.ds_util [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_5698ece7-6e4b-4784-a6d2-1c81f0432a4d/OSTACK_IMG_5698ece7-6e4b-4784-a6d2-1c81f0432a4d.vmdk to [datastore2] devstack-image-cache_base/111e8adf-1386-4dec-af3b-fd9616cd90d5/111e8adf-1386-4dec-af3b-fd9616cd90d5.vmdk. 
[ 1203.065712] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Cleaning up location [datastore2] OSTACK_IMG_5698ece7-6e4b-4784-a6d2-1c81f0432a4d {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1203.066201] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_5698ece7-6e4b-4784-a6d2-1c81f0432a4d {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1203.066270] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c935ab3-70fe-4208-8070-907a336bc17e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.072685] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1203.072685] env[62109]: value = "task-1117353" [ 1203.072685] env[62109]: _type = "Task" [ 1203.072685] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.076480] env[62109]: INFO nova.compute.manager [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Swapping old allocation on dict_keys(['574e9717-c25e-453d-8028-45d9e2f95398']) held by migration 084cd1ff-21dd-45d3-887b-de34c1622573 for instance [ 1203.083217] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117353, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.097408] env[62109]: DEBUG nova.scheduler.client.report [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Overwriting current allocation {'allocations': {'574e9717-c25e-453d-8028-45d9e2f95398': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 165}}, 'project_id': 'c9e5867b8b7e4ed18c5395baf46db66f', 'user_id': 'b39ff10ac8bd4e4abf04fd881e5125ac', 'consumer_generation': 1} on consumer fdc4486a-4837-4006-87c8-166cd5c41fcd {{(pid=62109) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1203.172329] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.172527] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.172745] env[62109]: DEBUG nova.network.neutron [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1203.279575] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.702s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.295746] env[62109]: INFO nova.scheduler.client.report [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted allocations for instance 13b24cd7-f0ae-4679-9f9d-5a914e1a2970 [ 1203.583343] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.050283} completed successfully. 
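[editor's note] "Overwriting current allocation ... on consumer" is the step where the allocation held by migration 084cd1ff-... is moved back onto instance fdc4486a-...: the report client reads both consumers' allocations and PUTs a replacement document guarded by the consumer generation. The sketch below only builds that replacement body, reusing the fields logged above in simplified form (the per-provider 'generation' shown in the log is left out); the helper name is invented and the HTTP call itself is omitted.

def build_allocation_put_body(allocations, project_id, user_id,
                              consumer_generation):
    """Body for PUT /allocations/{consumer_uuid}; the generation guards races."""
    return {
        'allocations': allocations,
        'project_id': project_id,
        'user_id': user_id,
        'consumer_generation': consumer_generation,
    }

body = build_allocation_put_body(
    allocations={'574e9717-c25e-453d-8028-45d9e2f95398': {
        'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}},
    project_id='c9e5867b8b7e4ed18c5395baf46db66f',
    user_id='b39ff10ac8bd4e4abf04fd881e5125ac',
    consumer_generation=1,
)
# If placement rejects the PUT because the consumer generation is stale, the
# swap is retried after re-reading the consumer's current allocations.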
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.583564] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1203.583739] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Releasing lock "[datastore2] devstack-image-cache_base/111e8adf-1386-4dec-af3b-fd9616cd90d5/111e8adf-1386-4dec-af3b-fd9616cd90d5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1203.583993] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/111e8adf-1386-4dec-af3b-fd9616cd90d5/111e8adf-1386-4dec-af3b-fd9616cd90d5.vmdk to [datastore2] c7a95d76-b143-45ce-87b3-de0b63e53169/c7a95d76-b143-45ce-87b3-de0b63e53169.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1203.584262] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a66e413b-b680-4826-aeeb-4830884cbb60 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.590701] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1203.590701] env[62109]: value = "task-1117354" [ 1203.590701] env[62109]: _type = "Task" [ 1203.590701] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.598252] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117354, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.803313] env[62109]: DEBUG oslo_concurrency.lockutils [None req-dd1c27bf-4aa9-4581-aa6d-84788d2bc0ef tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "13b24cd7-f0ae-4679-9f9d-5a914e1a2970" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.394s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.908345] env[62109]: DEBUG nova.network.neutron [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance_info_cache with network_info: [{"id": "8a4912e9-48e5-4762-aad9-050359873623", "address": "fa:16:3e:1b:58:93", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a4912e9-48", "ovs_interfaceid": "8a4912e9-48e5-4762-aad9-050359873623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.916507] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "364c7902-428f-4173-9a1d-934d1daf8dc3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.916761] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "364c7902-428f-4173-9a1d-934d1daf8dc3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.916997] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "364c7902-428f-4173-9a1d-934d1daf8dc3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.917226] env[62109]: DEBUG 
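[editor's note] The refreshed network_info cache entry above is a list of VIF dicts, each with nested subnets, fixed IPs, and any attached floating IPs. The small helper below shows how addresses can be pulled out of that structure; it operates on a trimmed copy of the cached entry and is not part of nova itself.

def addresses(network_info):
    """Yield (kind, address) pairs from a nova-style network_info list."""
    for vif in network_info:
        for subnet in vif.get('network', {}).get('subnets', []):
            for ip in subnet.get('ips', []):
                yield ('fixed', ip['address'])
                for fip in ip.get('floating_ips', []):
                    yield ('floating', fip['address'])

network_info = [{
    'id': '8a4912e9-48e5-4762-aad9-050359873623',
    'network': {'subnets': [{'ips': [{
        'address': '192.168.128.11',
        'floating_ips': [{'address': '10.180.180.189'}],
    }]}]},
}]
assert list(addresses(network_info)) == [
    ('fixed', '192.168.128.11'), ('floating', '10.180.180.189')]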
oslo_concurrency.lockutils [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "364c7902-428f-4173-9a1d-934d1daf8dc3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.917402] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "364c7902-428f-4173-9a1d-934d1daf8dc3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.919560] env[62109]: INFO nova.compute.manager [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Terminating instance [ 1203.921748] env[62109]: DEBUG nova.compute.manager [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1203.921954] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1203.922846] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0cc66a-015c-4eb1-aee2-385bf13210ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.931902] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1203.932225] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7a41ed9-c73c-42ad-b2bf-13d2d510736f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.939671] env[62109]: DEBUG oslo_vmware.api [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1203.939671] env[62109]: value = "task-1117355" [ 1203.939671] env[62109]: _type = "Task" [ 1203.939671] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.948472] env[62109]: DEBUG oslo_vmware.api [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117355, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.100753] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117354, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.413554] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-fdc4486a-4837-4006-87c8-166cd5c41fcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.414167] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1204.414482] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfd5a95f-b64c-4024-848d-2bbd6f94d4eb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.423044] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1204.423044] env[62109]: value = "task-1117356" [ 1204.423044] env[62109]: _type = "Task" [ 1204.423044] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.432914] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117356, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.450594] env[62109]: DEBUG oslo_vmware.api [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117355, 'name': PowerOffVM_Task, 'duration_secs': 0.238249} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.450983] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1204.451212] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1204.451560] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c54dba1-a78e-45a7-8ccc-493ef7172ba9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.602534] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117354, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.880458] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1204.880660] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1204.880847] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleting the datastore file [datastore1] 364c7902-428f-4173-9a1d-934d1daf8dc3 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1204.881244] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-219131a0-4f82-4e6a-b703-526f702aaf5b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.889133] env[62109]: DEBUG oslo_vmware.api [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1204.889133] env[62109]: value = "task-1117358" [ 1204.889133] env[62109]: _type = "Task" [ 1204.889133] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.897309] env[62109]: DEBUG oslo_vmware.api [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117358, 'name': DeleteDatastoreFile_Task} progress is 0%. 
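[editor's note] Both teardown paths in this section (instances 13b24cd7-... and 364c7902-...) follow the same order: power off the VM, unregister it, delete its datastore directory, then deallocate the network. A compact sketch of that ordering with the individual steps passed in as callables; the error handling and retries that the real _destroy_instance path has are omitted.

def destroy_instance(power_off, unregister, delete_datastore_dir,
                     deallocate_network):
    """Run the teardown steps in the order the log entries show."""
    power_off()              # PowerOffVM_Task
    unregister()             # VirtualMachine.UnregisterVM
    delete_datastore_dir()   # FileManager.DeleteDatastoreFile_Task on the VM dir
    deallocate_network()     # neutron deallocate_for_instance()

steps = []
destroy_instance(lambda: steps.append('off'),
                 lambda: steps.append('unregister'),
                 lambda: steps.append('delete'),
                 lambda: steps.append('deallocate'))
assert steps == ['off', 'unregister', 'delete', 'deallocate']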
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.933033] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117356, 'name': PowerOffVM_Task, 'duration_secs': 0.230466} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.933482] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1204.934273] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1204.934474] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1204.934638] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1204.934831] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1204.934986] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1204.935162] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1204.935411] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1204.935586] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1204.935765] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1204.935938] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1204.936136] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1204.941301] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f49f0c5-cd5c-498f-ad8d-16147f024e73 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.958265] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1204.958265] env[62109]: value = "task-1117359" [ 1204.958265] env[62109]: _type = "Task" [ 1204.958265] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.967024] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117359, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.102912] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117354, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.398763] env[62109]: DEBUG oslo_vmware.api [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117358, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.365453} completed successfully. 
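[editor's note] The nova.virt.hardware entries above walk through topology selection for the m1.nano flavor: with no flavor or image limits (0:0:0) and 1 vCPU, the only possible topology is sockets=1, cores=1, threads=1. Below is a simplified enumeration of topologies whose product equals the vCPU count, in the spirit of those entries; the real code also applies preferences and sorting that are skipped here.

from collections import namedtuple

Topology = namedtuple('Topology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """All (sockets, cores, threads) whose product is exactly vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, max_cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= max_threads:
                found.append(Topology(sockets, cores, threads))
    return found

# 1 vCPU with the default 65536 limits -> exactly one topology, 1:1:1.
assert possible_topologies(1) == [Topology(1, 1, 1)]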
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.399074] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1205.399258] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1205.399445] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1205.399629] env[62109]: INFO nova.compute.manager [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Took 1.48 seconds to destroy the instance on the hypervisor. [ 1205.399881] env[62109]: DEBUG oslo.service.loopingcall [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1205.400097] env[62109]: DEBUG nova.compute.manager [-] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1205.400199] env[62109]: DEBUG nova.network.neutron [-] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1205.467039] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117359, 'name': ReconfigVM_Task, 'duration_secs': 0.141466} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.467915] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b575a2-16ac-4558-9586-d1c934a56d53 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.488301] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1205.488620] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1205.488785] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1205.489817] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1205.490039] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1205.490213] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1205.490442] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1205.490617] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1205.490839] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1205.491062] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1205.491271] env[62109]: DEBUG nova.virt.hardware [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1205.492206] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dea205f-4578-4a7a-9218-aac4145a97b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.498511] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1205.498511] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52406a1f-80ce-ddb3-2c70-17f8cc901ddf" [ 1205.498511] env[62109]: _type = "Task" [ 1205.498511] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.508851] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52406a1f-80ce-ddb3-2c70-17f8cc901ddf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.602275] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117354, 'name': CopyVirtualDisk_Task} progress is 91%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.691969] env[62109]: DEBUG nova.compute.manager [req-3b866657-dad8-4979-a821-29119319d778 req-477753d0-a942-47ea-964b-5b57da21ce4f service nova] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Received event network-vif-deleted-4614eaa6-23c7-403d-b647-39895157b961 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1205.692235] env[62109]: INFO nova.compute.manager [req-3b866657-dad8-4979-a821-29119319d778 req-477753d0-a942-47ea-964b-5b57da21ce4f service nova] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Neutron deleted interface 4614eaa6-23c7-403d-b647-39895157b961; detaching it from the instance and deleting it from the info cache [ 1205.692373] env[62109]: DEBUG nova.network.neutron [req-3b866657-dad8-4979-a821-29119319d778 req-477753d0-a942-47ea-964b-5b57da21ce4f service nova] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.009311] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52406a1f-80ce-ddb3-2c70-17f8cc901ddf, 'name': SearchDatastore_Task, 'duration_secs': 0.016684} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.014792] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Reconfiguring VM instance instance-0000006c to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1206.015127] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-401e2ad7-393e-494a-9b3f-9cd444b4e0e0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.036150] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1206.036150] env[62109]: value = "task-1117360" [ 1206.036150] env[62109]: _type = "Task" [ 1206.036150] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.046258] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117360, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.105563] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117354, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.223597} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.106210] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/111e8adf-1386-4dec-af3b-fd9616cd90d5/111e8adf-1386-4dec-af3b-fd9616cd90d5.vmdk to [datastore2] c7a95d76-b143-45ce-87b3-de0b63e53169/c7a95d76-b143-45ce-87b3-de0b63e53169.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1206.107013] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13920491-a80b-4b77-b3c4-2da112328caf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.130084] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] c7a95d76-b143-45ce-87b3-de0b63e53169/c7a95d76-b143-45ce-87b3-de0b63e53169.vmdk or device None with type streamOptimized {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1206.130373] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63638c60-a771-4e07-868f-ce37838c1557 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.151561] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1206.151561] env[62109]: value = "task-1117361" [ 1206.151561] env[62109]: _type = "Task" [ 1206.151561] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.159076] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117361, 'name': ReconfigVM_Task} progress is 5%. 
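The attach in task-1117361 above is a single VirtualMachine.ReconfigVM_Task call whose config spec adds a VirtualDisk device backed by the copied VMDK. The trace itself goes through oslo.vmware's suds-based client; purely to illustrate the shape of that call, here is a pyVmomi sketch (vm_obj, the datastore path, controller key and unit number are placeholders):

    from pyVmomi import vim

    def attach_vmdk(vm_obj, vmdk_path, controller_key, unit_number):
        """Attach an existing VMDK to a VM with one ReconfigVM_Task call."""
        disk = vim.vm.device.VirtualDisk()
        disk.controllerKey = controller_key
        disk.unitNumber = unit_number
        disk.backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
        disk.backing.fileName = vmdk_path          # e.g. '[datastore2] <uuid>/<uuid>.vmdk'
        disk.backing.diskMode = 'persistent'

        change = vim.vm.device.VirtualDeviceSpec()
        change.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
        change.device = disk

        spec = vim.vm.ConfigSpec(deviceChange=[change])
        return vm_obj.ReconfigVM_Task(spec=spec)   # returns a Task to poll, as in the log
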
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.169516] env[62109]: DEBUG nova.network.neutron [-] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.195487] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8efed090-9c1b-40a5-8828-578d0d139e4b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.205803] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba785ea-ef98-418b-a536-64b10b071117 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.232246] env[62109]: DEBUG nova.compute.manager [req-3b866657-dad8-4979-a821-29119319d778 req-477753d0-a942-47ea-964b-5b57da21ce4f service nova] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Detach interface failed, port_id=4614eaa6-23c7-403d-b647-39895157b961, reason: Instance 364c7902-428f-4173-9a1d-934d1daf8dc3 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1206.547154] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117360, 'name': ReconfigVM_Task, 'duration_secs': 0.259431} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.547744] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Reconfigured VM instance instance-0000006c to detach disk 2000 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1206.548560] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3a8979-2086-4770-b8c9-ca8d8e80883f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.572098] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] fdc4486a-4837-4006-87c8-166cd5c41fcd/fdc4486a-4837-4006-87c8-166cd5c41fcd.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1206.572098] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-483fd262-56be-4f87-b985-41340803a4e0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.590581] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1206.590581] env[62109]: value = "task-1117362" [ 1206.590581] env[62109]: _type = "Task" [ 1206.590581] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.601528] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117362, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.661816] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117361, 'name': ReconfigVM_Task, 'duration_secs': 0.336168} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.662257] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Reconfigured VM instance instance-0000006a to attach disk [datastore2] c7a95d76-b143-45ce-87b3-de0b63e53169/c7a95d76-b143-45ce-87b3-de0b63e53169.vmdk or device None with type streamOptimized {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1206.662962] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8119d463-ad36-4954-a6d6-aff7d212386c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.669470] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1206.669470] env[62109]: value = "task-1117363" [ 1206.669470] env[62109]: _type = "Task" [ 1206.669470] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.672581] env[62109]: INFO nova.compute.manager [-] [instance: 364c7902-428f-4173-9a1d-934d1daf8dc3] Took 1.27 seconds to deallocate network for instance. [ 1206.680240] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117363, 'name': Rename_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.974415] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.974674] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.100539] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117362, 'name': ReconfigVM_Task, 'duration_secs': 0.409666} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.100869] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Reconfigured VM instance instance-0000006c to attach disk [datastore1] fdc4486a-4837-4006-87c8-166cd5c41fcd/fdc4486a-4837-4006-87c8-166cd5c41fcd.vmdk or device None with type thin {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1207.101724] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f25a5b4-7c8b-4798-b931-d8096ada01bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.119383] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb11536-da4b-4cdf-b2c0-f579214140dd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.138610] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818ff506-24ef-4e38-a176-dc1ddf27a634 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.156149] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24fc94d0-c990-4d5e-b2f5-36c23c2d832b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.162529] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1207.162763] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6deaa3a6-35fc-406a-9ae2-5963778f4917 {{(pid=62109) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.168534] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1207.168534] env[62109]: value = "task-1117364" [ 1207.168534] env[62109]: _type = "Task" [ 1207.168534] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.177971] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117364, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.181302] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.181532] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.181791] env[62109]: DEBUG nova.objects.instance [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lazy-loading 'resources' on Instance uuid 364c7902-428f-4173-9a1d-934d1daf8dc3 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1207.182824] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117363, 'name': Rename_Task, 'duration_secs': 0.159232} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.183067] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1207.183499] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9653bf3-1577-4111-bff3-0d832a8a86b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.190104] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1207.190104] env[62109]: value = "task-1117365" [ 1207.190104] env[62109]: _type = "Task" [ 1207.190104] env[62109]: } to complete. 
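The repeated "Waiting for the task ... to complete" / "progress is N%" pairs above are oslo.vmware's task polling: the driver invokes a *_Task method, gets back a Task managed-object reference, and blocks in wait_for_task() while _poll_task logs progress. Assuming an already-established oslo_vmware.api.VMwareAPISession (as the driver holds; the function name and vm_ref are illustrative), the pattern is roughly:

    def power_on(session, vm_ref):
        """Start a VM and block until the vCenter task finishes.

        session is an oslo_vmware.api.VMwareAPISession; PowerOnVM_Task returns
        a Task moref that wait_for_task() polls (the 'progress is N%' lines).
        """
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)   # raises on task error, returns task info on success
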
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.201507] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117365, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.477771] env[62109]: INFO nova.compute.manager [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Detaching volume 3d953331-6f18-4580-a50a-a728e86a4128 [ 1207.513614] env[62109]: INFO nova.virt.block_device [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Attempting to driver detach volume 3d953331-6f18-4580-a50a-a728e86a4128 from mountpoint /dev/sdb [ 1207.514038] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Volume detach. Driver type: vmdk {{(pid=62109) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1207.514313] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244564', 'volume_id': '3d953331-6f18-4580-a50a-a728e86a4128', 'name': 'volume-3d953331-6f18-4580-a50a-a728e86a4128', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '1b3d7fa7-5428-460e-ab47-49c6d38f24a5', 'attached_at': '', 'detached_at': '', 'volume_id': '3d953331-6f18-4580-a50a-a728e86a4128', 'serial': '3d953331-6f18-4580-a50a-a728e86a4128'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1207.515337] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c0ca86-e1d4-4a1f-bfbe-85e7fd4194e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.536915] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684079a2-4139-4fc2-9b67-a6aa0a410430 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.544292] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c703f7-6aea-4982-9bc3-ec32cb55185a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.564066] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b67570-606e-45e5-9828-3a69ed65a045 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.580138] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] The volume has not been displaced from its original location: [datastore1] volume-3d953331-6f18-4580-a50a-a728e86a4128/volume-3d953331-6f18-4580-a50a-a728e86a4128.vmdk. No consolidation needed. {{(pid=62109) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1207.585411] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Reconfiguring VM instance instance-00000066 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1207.585709] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9dbd27de-f330-4a3b-a784-8280a5d02b52 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.603534] env[62109]: DEBUG oslo_vmware.api [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1207.603534] env[62109]: value = "task-1117366" [ 1207.603534] env[62109]: _type = "Task" [ 1207.603534] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.613819] env[62109]: DEBUG oslo_vmware.api [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117366, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.677975] env[62109]: DEBUG oslo_vmware.api [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117364, 'name': PowerOnVM_Task, 'duration_secs': 0.369294} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.680047] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1207.699973] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117365, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.764573] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bdb76e8-b555-44b7-b07b-45e43c2d15de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.772270] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d32594-fd66-4f31-9c39-3180f0a3ff52 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.801435] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e50cdb-602c-4a8b-8572-364280d3e627 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.808914] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f00e95-dc04-46a2-be8c-6979ffc9cc3d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.821834] env[62109]: DEBUG nova.compute.provider_tree [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1208.114861] env[62109]: DEBUG oslo_vmware.api [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117366, 'name': ReconfigVM_Task, 'duration_secs': 0.451571} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.115154] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Reconfigured VM instance instance-00000066 to detach disk 2001 {{(pid=62109) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1208.119717] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1afe3b5-ba12-4d45-a75f-5c022a34f429 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.134594] env[62109]: DEBUG oslo_vmware.api [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1208.134594] env[62109]: value = "task-1117367" [ 1208.134594] env[62109]: _type = "Task" [ 1208.134594] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.142215] env[62109]: DEBUG oslo_vmware.api [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117367, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.200249] env[62109]: DEBUG oslo_vmware.api [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117365, 'name': PowerOnVM_Task, 'duration_secs': 0.696752} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.200570] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1208.307527] env[62109]: DEBUG nova.compute.manager [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1208.308508] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e0511a-4531-4944-ba91-bc78ad549e8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.324552] env[62109]: DEBUG nova.scheduler.client.report [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1208.644674] env[62109]: DEBUG oslo_vmware.api [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117367, 'name': ReconfigVM_Task, 'duration_secs': 0.147378} completed successfully. 
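The inventory dict reported above translates into schedulable capacity as (total - reserved) * allocation_ratio per resource class: 48 VCPU * 4.0 = 192 schedulable vCPUs, (196590 - 512) MB * 1.0 = 196078 MB of RAM, and (400 - 0) GB * 1.0 = 400 GB of disk. A quick check of that arithmetic:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    capacity = {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inventory.items()}
    print(capacity)   # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
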
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.644999] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-244564', 'volume_id': '3d953331-6f18-4580-a50a-a728e86a4128', 'name': 'volume-3d953331-6f18-4580-a50a-a728e86a4128', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '1b3d7fa7-5428-460e-ab47-49c6d38f24a5', 'attached_at': '', 'detached_at': '', 'volume_id': '3d953331-6f18-4580-a50a-a728e86a4128', 'serial': '3d953331-6f18-4580-a50a-a728e86a4128'} {{(pid=62109) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1208.689972] env[62109]: INFO nova.compute.manager [None req-f23b997f-7059-4eb5-85e5-1d721a915897 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance to original state: 'active' [ 1208.824979] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6443b9bc-ddab-4092-acf4-71c23ef1fdba tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.099s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.829161] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.647s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.850821] env[62109]: INFO nova.scheduler.client.report [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted allocations for instance 364c7902-428f-4173-9a1d-934d1daf8dc3 [ 1209.186040] env[62109]: DEBUG nova.objects.instance [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lazy-loading 'flavor' on Instance uuid 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1209.359897] env[62109]: DEBUG oslo_concurrency.lockutils [None req-99b69d9a-9932-4f94-9ab0-c1f3d29fbb37 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "364c7902-428f-4173-9a1d-934d1daf8dc3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.443s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.003823] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "9548e03e-f51c-4e39-9cc1-27724c2d0961" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
{{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.004163] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "9548e03e-f51c-4e39-9cc1-27724c2d0961" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.094683] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "c7a95d76-b143-45ce-87b3-de0b63e53169" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.094983] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.095252] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "c7a95d76-b143-45ce-87b3-de0b63e53169-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.095447] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.095634] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.098170] env[62109]: INFO nova.compute.manager [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Terminating instance [ 1210.102723] env[62109]: DEBUG nova.compute.manager [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Start destroying the instance on the hypervisor. 
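The lockutils entries above (Acquiring / acquired / "released", with waited and held times) come from oslo.concurrency's in-process locks, which the compute manager uses to serialize operations per instance UUID and per resource tracker. A minimal sketch of the same pattern; the lock names and the work done under them are placeholders:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        """Runs with the single 'compute_resources' lock held, as in the trace."""
        pass

    def do_terminate_instance(instance_uuid, shutdown):
        # Serialize lifecycle operations on one instance UUID, mirroring the
        # 'Lock "<uuid>" acquired by ... :: waited 0.000s' lines above.
        with lockutils.lock(instance_uuid):
            shutdown(instance_uuid)
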
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1210.102823] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1210.104022] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a058819-cd20-46df-a344-7c51efdbef55 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.113130] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1210.113393] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-706fc0ec-95eb-4006-8e8b-3fb94f10f6a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.119259] env[62109]: DEBUG oslo_vmware.api [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1210.119259] env[62109]: value = "task-1117368" [ 1210.119259] env[62109]: _type = "Task" [ 1210.119259] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.127557] env[62109]: DEBUG oslo_vmware.api [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117368, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.193138] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a88f2ee9-7fb8-4026-9b30-7020fec9b045 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.218s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.506794] env[62109]: DEBUG nova.compute.manager [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1210.607691] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "fdc4486a-4837-4006-87c8-166cd5c41fcd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.607946] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "fdc4486a-4837-4006-87c8-166cd5c41fcd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.608186] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "fdc4486a-4837-4006-87c8-166cd5c41fcd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.608378] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "fdc4486a-4837-4006-87c8-166cd5c41fcd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.608555] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "fdc4486a-4837-4006-87c8-166cd5c41fcd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.610823] env[62109]: INFO nova.compute.manager [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Terminating instance [ 1210.612763] env[62109]: DEBUG nova.compute.manager [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1210.612969] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1210.613800] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b02d4f-f0c3-4bec-a910-3c75fc00750d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.621477] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1210.624827] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13a84577-3c25-4c24-a87d-670ab1dd661e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.630700] env[62109]: DEBUG oslo_vmware.api [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117368, 'name': PowerOffVM_Task, 'duration_secs': 0.235362} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.631842] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1210.632028] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1210.632366] env[62109]: DEBUG oslo_vmware.api [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1210.632366] env[62109]: value = "task-1117369" [ 1210.632366] env[62109]: _type = "Task" [ 1210.632366] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.632563] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34f2e2a1-4457-4fa2-beb8-e43f90a3464c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.641675] env[62109]: DEBUG oslo_vmware.api [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117369, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.701447] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1210.701824] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1210.702153] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleting the datastore file [datastore2] c7a95d76-b143-45ce-87b3-de0b63e53169 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1210.702558] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d254e9c-4b3f-454e-8abc-21070e3bc752 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.710033] env[62109]: DEBUG oslo_vmware.api [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for the task: (returnval){ [ 1210.710033] env[62109]: value = "task-1117371" [ 1210.710033] env[62109]: _type = "Task" [ 1210.710033] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.718399] env[62109]: DEBUG oslo_vmware.api [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117371, 'name': DeleteDatastoreFile_Task} progress is 0%. 
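The destroy path above is three vCenter calls in sequence: PowerOffVM_Task, UnregisterVM, then FileManager.DeleteDatastoreFile_Task on the instance's datastore directory, after which Nova deallocates the Neutron ports. Sketched with the same oslo.vmware session pattern as before (vm_ref, ds_path and dc_ref are placeholders):

    def destroy_on_hypervisor(session, vm_ref, ds_path, dc_ref):
        """Power off, unregister, then delete the instance's datastore directory."""
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # UnregisterVM is synchronous -- no task to poll, matching the trace.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)
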
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.028707] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.028972] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.030415] env[62109]: INFO nova.compute.claims [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1211.144325] env[62109]: DEBUG oslo_vmware.api [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117369, 'name': PowerOffVM_Task, 'duration_secs': 0.186123} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.144589] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1211.144760] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1211.145008] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c3ede6f-2a75-4237-a240-c7c5ac793eb7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.206450] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1211.206665] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1211.206850] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleting the 
datastore file [datastore1] fdc4486a-4837-4006-87c8-166cd5c41fcd {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1211.207119] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25c9faec-7fe3-46a6-a63d-3b17a708f469 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.215316] env[62109]: DEBUG oslo_vmware.api [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1211.215316] env[62109]: value = "task-1117373" [ 1211.215316] env[62109]: _type = "Task" [ 1211.215316] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.221211] env[62109]: DEBUG oslo_vmware.api [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Task: {'id': task-1117371, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145205} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.221738] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1211.221926] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1211.222121] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1211.222304] env[62109]: INFO nova.compute.manager [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1211.222542] env[62109]: DEBUG oslo.service.loopingcall [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1211.222728] env[62109]: DEBUG nova.compute.manager [-] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1211.222821] env[62109]: DEBUG nova.network.neutron [-] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1211.227111] env[62109]: DEBUG oslo_vmware.api [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117373, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.245942] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.246175] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.246386] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.246571] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.246741] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.248892] env[62109]: INFO nova.compute.manager [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Terminating instance [ 1211.250731] 
env[62109]: DEBUG nova.compute.manager [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1211.250924] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1211.251716] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56385c52-b522-46e1-862c-71ec5bf3004e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.258227] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1211.258446] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf9e57ad-e8fa-4205-b55f-ae717548d087 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.263586] env[62109]: DEBUG oslo_vmware.api [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1211.263586] env[62109]: value = "task-1117374" [ 1211.263586] env[62109]: _type = "Task" [ 1211.263586] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.271353] env[62109]: DEBUG oslo_vmware.api [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117374, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.726964] env[62109]: DEBUG oslo_vmware.api [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117373, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14922} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.727252] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1211.727470] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1211.727719] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1211.727903] env[62109]: INFO nova.compute.manager [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1211.728163] env[62109]: DEBUG oslo.service.loopingcall [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1211.728356] env[62109]: DEBUG nova.compute.manager [-] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1211.728450] env[62109]: DEBUG nova.network.neutron [-] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1211.734260] env[62109]: DEBUG nova.compute.manager [req-18b6b599-606a-450e-bf61-37ba479f2ed6 req-27faddee-ec72-4c00-9950-3f9a4f82cebe service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Received event network-vif-deleted-c190fa8c-8d46-43de-943d-554e47a2fe5c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1211.734447] env[62109]: INFO nova.compute.manager [req-18b6b599-606a-450e-bf61-37ba479f2ed6 req-27faddee-ec72-4c00-9950-3f9a4f82cebe service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Neutron deleted interface c190fa8c-8d46-43de-943d-554e47a2fe5c; detaching it from the instance and deleting it from the info cache [ 1211.734618] env[62109]: DEBUG nova.network.neutron [req-18b6b599-606a-450e-bf61-37ba479f2ed6 req-27faddee-ec72-4c00-9950-3f9a4f82cebe service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.773930] env[62109]: DEBUG oslo_vmware.api [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] 
Task: {'id': task-1117374, 'name': PowerOffVM_Task, 'duration_secs': 0.198872} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.774228] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1211.774404] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1211.774653] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23cbaad8-c689-4167-8962-1f93c1c274ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.836041] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1211.836389] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1211.836501] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleting the datastore file [datastore1] 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1211.836714] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d144739a-a2e8-4752-b3f7-d23d36a815f8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.843463] env[62109]: DEBUG oslo_vmware.api [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1211.843463] env[62109]: value = "task-1117376" [ 1211.843463] env[62109]: _type = "Task" [ 1211.843463] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.850932] env[62109]: DEBUG oslo_vmware.api [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117376, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.129713] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa30e25-e1e1-40ba-886b-fa752c762536 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.137214] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8bf077-be93-421d-8c8e-8ee4865532de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.168591] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b34892-dd79-43e9-afb8-cf934a7bb36f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.176314] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fcc300-d786-4ddf-b046-9779884eb957 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.189273] env[62109]: DEBUG nova.compute.provider_tree [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1212.207080] env[62109]: DEBUG nova.network.neutron [-] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.237489] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb45b7f3-5d05-4bcf-848d-2c5433203ca5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.247618] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8794e8-add3-40d7-aeeb-483e14722d8e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.272071] env[62109]: DEBUG nova.compute.manager [req-18b6b599-606a-450e-bf61-37ba479f2ed6 req-27faddee-ec72-4c00-9950-3f9a4f82cebe service nova] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Detach interface failed, port_id=c190fa8c-8d46-43de-943d-554e47a2fe5c, reason: Instance c7a95d76-b143-45ce-87b3-de0b63e53169 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1212.352935] env[62109]: DEBUG oslo_vmware.api [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117376, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17566} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.353221] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1212.353413] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1212.353600] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1212.353780] env[62109]: INFO nova.compute.manager [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1212.354046] env[62109]: DEBUG oslo.service.loopingcall [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1212.354253] env[62109]: DEBUG nova.compute.manager [-] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1212.354337] env[62109]: DEBUG nova.network.neutron [-] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1212.692745] env[62109]: DEBUG nova.scheduler.client.report [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1212.704564] env[62109]: DEBUG nova.network.neutron [-] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.708942] env[62109]: INFO nova.compute.manager [-] [instance: c7a95d76-b143-45ce-87b3-de0b63e53169] Took 1.49 seconds to deallocate network for instance. 
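The task lifecycle traced above (Invoking FileManager.DeleteDatastoreFile_Task, then repeated _poll_task entries until "completed successfully") is the generic oslo.vmware pattern: the driver invokes a *_Task SOAP method through the session and then blocks in wait_for_task(), which polls the task until it reaches a terminal state. A minimal Python sketch of that pattern follows; the endpoint, credentials and datastore path are placeholders, and the session keyword names are recalled from oslo.vmware rather than copied from this deployment, so treat them as assumptions to verify.

```python
# Illustrative sketch only -- not Nova's ds_util code. Endpoint, credentials
# and the datastore path are placeholders; the VMwareAPISession keyword names
# are assumed from oslo.vmware and should be checked against the installed
# version.
from oslo_vmware import api

session = api.VMwareAPISession(
    host='vc.example.test',
    server_username='svc-nova',
    server_password='secret',
    api_retry_count=10,
    task_poll_interval=0.5,   # roughly matches the frequent _poll_task lines
)

file_manager = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore1] some-instance-uuid',
    datacenter=None,  # a real caller passes the datacenter managed-object ref
)
session.wait_for_task(task)   # polls until success, raises on task error
```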
[ 1213.200342] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.169s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.200342] env[62109]: DEBUG nova.compute.manager [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1213.207054] env[62109]: INFO nova.compute.manager [-] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Took 1.48 seconds to deallocate network for instance. [ 1213.216504] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.216764] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.216962] env[62109]: DEBUG nova.objects.instance [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lazy-loading 'resources' on Instance uuid c7a95d76-b143-45ce-87b3-de0b63e53169 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1213.292493] env[62109]: DEBUG nova.network.neutron [-] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.706329] env[62109]: DEBUG nova.compute.utils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1213.707671] env[62109]: DEBUG nova.compute.manager [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1213.707835] env[62109]: DEBUG nova.network.neutron [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1213.711631] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.751661] env[62109]: DEBUG nova.policy [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73cb2c252e3f4e68a2767b349e0917e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df642ff4b7f247d09f80b260ed9ef53f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1213.758440] env[62109]: DEBUG nova.compute.manager [req-0e059113-95df-41f3-a65c-aa8e814d006b req-7d159056-962f-487c-bfd9-de2efedb0ae8 service nova] [instance: fdc4486a-4837-4006-87c8-166cd5c41fcd] Received event network-vif-deleted-8a4912e9-48e5-4762-aad9-050359873623 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1213.758524] env[62109]: DEBUG nova.compute.manager [req-0e059113-95df-41f3-a65c-aa8e814d006b req-7d159056-962f-487c-bfd9-de2efedb0ae8 service nova] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Received event network-vif-deleted-a04abfce-a9e7-413a-94d6-d14ed8f205cb {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1213.796285] env[62109]: INFO nova.compute.manager [-] [instance: 1b3d7fa7-5428-460e-ab47-49c6d38f24a5] Took 1.44 seconds to deallocate network for instance. 
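The "Waiting for function ..._deallocate_network_with_retries to return." entries come from an oslo.service looping call that re-runs the Neutron deallocation on transient failures; the "Took 1.4x seconds to deallocate network for instance" INFO lines mark its completion. A minimal sketch of that retry shape using oslo.service's RetryDecorator is below; the retry count, sleep times and exception type are illustrative values, not Nova's, and the function body is a placeholder.

```python
# Sketch of the retry-wrapper pattern, not Nova's _deallocate_network_with_retries.
# max_retry_count / inc_sleep_time / max_sleep_time are RetryDecorator's kwargs
# as remembered; the values and the exception type are placeholders.
from oslo_service import loopingcall


@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                            max_sleep_time=10, exceptions=(ConnectionError,))
def deallocate_network_with_retries():
    # Placeholder body: the real function asks Neutron to unbind and delete
    # the instance's ports, raising on transient failures so the decorator
    # schedules another attempt after a growing sleep.
    print('deallocating network')


deallocate_network_with_retries()
```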
[ 1213.801138] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea790611-879c-471f-8ace-41003f1d8c40 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.809448] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6385a5fd-9823-4e76-bf0b-642b055b7002 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.840405] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ecf6867-cb5e-4c0f-86fb-e895772fd00e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.848089] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32e8a68-cf21-4ad4-a21d-1902c1ea6340 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.862595] env[62109]: DEBUG nova.compute.provider_tree [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1214.002171] env[62109]: DEBUG nova.network.neutron [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Successfully created port: 00b62cb0-6491-4a57-8252-a869232aff3a {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1214.210829] env[62109]: DEBUG nova.compute.manager [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1214.305646] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.364276] env[62109]: DEBUG nova.scheduler.client.report [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1214.868522] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.652s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.871342] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.160s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.871541] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.873280] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.568s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.873510] env[62109]: DEBUG nova.objects.instance [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lazy-loading 'resources' on Instance uuid 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1214.891595] env[62109]: INFO nova.scheduler.client.report [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Deleted 
allocations for instance c7a95d76-b143-45ce-87b3-de0b63e53169 [ 1214.893307] env[62109]: INFO nova.scheduler.client.report [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleted allocations for instance fdc4486a-4837-4006-87c8-166cd5c41fcd [ 1215.220195] env[62109]: DEBUG nova.compute.manager [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1215.246846] env[62109]: DEBUG nova.virt.hardware [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1215.247114] env[62109]: DEBUG nova.virt.hardware [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1215.247281] env[62109]: DEBUG nova.virt.hardware [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1215.247468] env[62109]: DEBUG nova.virt.hardware [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1215.247616] env[62109]: DEBUG nova.virt.hardware [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1215.247765] env[62109]: DEBUG nova.virt.hardware [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1215.247978] env[62109]: DEBUG nova.virt.hardware [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1215.248154] env[62109]: DEBUG nova.virt.hardware [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1215.248330] env[62109]: DEBUG nova.virt.hardware [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1215.248491] env[62109]: DEBUG nova.virt.hardware [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1215.248663] env[62109]: DEBUG nova.virt.hardware [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1215.249553] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040c81ea-7157-4a50-aa17-4e46be67f1e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.257401] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56dcfa7d-ee53-42a2-9776-b460f20779cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.400695] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c2943cbc-049f-4950-879d-d9cc8311d48e tempest-ServerActionsTestOtherB-1141754876 tempest-ServerActionsTestOtherB-1141754876-project-member] Lock "c7a95d76-b143-45ce-87b3-de0b63e53169" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.306s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.404043] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ea361e29-2173-4edf-9f93-f45c263473bb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "fdc4486a-4837-4006-87c8-166cd5c41fcd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.796s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.444338] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654ef95b-0be3-4bc7-9bb9-15f252d5fae1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.451739] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2bf7e8-53c2-435b-98d7-ac359b485c04 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.481130] env[62109]: DEBUG nova.network.neutron [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc 
tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Successfully updated port: 00b62cb0-6491-4a57-8252-a869232aff3a {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1215.483028] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654ef1bd-47c8-447e-ad80-07703989eea6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.492323] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ea7431-47c4-4391-a7a5-ab9bc16214dd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.507145] env[62109]: DEBUG nova.compute.provider_tree [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1215.784699] env[62109]: DEBUG nova.compute.manager [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Received event network-vif-plugged-00b62cb0-6491-4a57-8252-a869232aff3a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1215.784939] env[62109]: DEBUG oslo_concurrency.lockutils [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] Acquiring lock "9548e03e-f51c-4e39-9cc1-27724c2d0961-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.785172] env[62109]: DEBUG oslo_concurrency.lockutils [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] Lock "9548e03e-f51c-4e39-9cc1-27724c2d0961-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.785349] env[62109]: DEBUG oslo_concurrency.lockutils [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] Lock "9548e03e-f51c-4e39-9cc1-27724c2d0961-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.785525] env[62109]: DEBUG nova.compute.manager [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] No waiting events found dispatching network-vif-plugged-00b62cb0-6491-4a57-8252-a869232aff3a {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1215.785689] env[62109]: WARNING nova.compute.manager [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Received unexpected event network-vif-plugged-00b62cb0-6491-4a57-8252-a869232aff3a for instance with vm_state building and task_state spawning. 
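The paired 'Acquiring lock ...' / 'Lock ... acquired ... waited 0.000s' / 'Lock ... "released" ... held 0.000s' entries (here for the per-instance "-events" lock, elsewhere for "compute_resources") are emitted by oslo.concurrency's lockutils wrapper around the guarded function. A minimal sketch of that usage follows; the lock names and function are illustrative, not Nova's.

```python
# Minimal lockutils sketch: the synchronized decorator serializes callers on a
# named in-process lock, producing the acquire/wait/held log lines seen above.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage():
    # Placeholder for work done under the lock (e.g. resource-tracker updates).
    print('usage updated under lock')


# The same mechanism is available as a context manager for ad-hoc sections.
with lockutils.lock('some-instance-uuid-events'):
    update_usage()
```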
[ 1215.785851] env[62109]: DEBUG nova.compute.manager [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Received event network-changed-00b62cb0-6491-4a57-8252-a869232aff3a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1215.786013] env[62109]: DEBUG nova.compute.manager [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Refreshing instance network info cache due to event network-changed-00b62cb0-6491-4a57-8252-a869232aff3a. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1215.786246] env[62109]: DEBUG oslo_concurrency.lockutils [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] Acquiring lock "refresh_cache-9548e03e-f51c-4e39-9cc1-27724c2d0961" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1215.786427] env[62109]: DEBUG oslo_concurrency.lockutils [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] Acquired lock "refresh_cache-9548e03e-f51c-4e39-9cc1-27724c2d0961" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.786596] env[62109]: DEBUG nova.network.neutron [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Refreshing network info cache for port 00b62cb0-6491-4a57-8252-a869232aff3a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1215.987261] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "refresh_cache-9548e03e-f51c-4e39-9cc1-27724c2d0961" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.010220] env[62109]: DEBUG nova.scheduler.client.report [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1216.317681] env[62109]: DEBUG nova.network.neutron [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1216.390138] env[62109]: DEBUG nova.network.neutron [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.515541] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.642s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.533164] env[62109]: INFO nova.scheduler.client.report [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleted allocations for instance 1b3d7fa7-5428-460e-ab47-49c6d38f24a5 [ 1216.583443] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1216.583588] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.892944] env[62109]: DEBUG oslo_concurrency.lockutils [req-31e3e883-5d4e-497d-b3c7-f4354595a866 req-c4aeac2f-2921-4f94-bb38-44acd91023a6 service nova] Releasing lock "refresh_cache-9548e03e-f51c-4e39-9cc1-27724c2d0961" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1216.893208] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "refresh_cache-9548e03e-f51c-4e39-9cc1-27724c2d0961" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.893379] env[62109]: DEBUG nova.network.neutron [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1217.041274] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8ca875db-52bd-4fcc-9938-bd22d71a3ef8 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "1b3d7fa7-5428-460e-ab47-49c6d38f24a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.795s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.086029] env[62109]: DEBUG nova.compute.manager [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1217.426515] env[62109]: DEBUG nova.network.neutron [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1217.572057] env[62109]: DEBUG nova.network.neutron [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Updating instance_info_cache with network_info: [{"id": "00b62cb0-6491-4a57-8252-a869232aff3a", "address": "fa:16:3e:d2:07:a8", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00b62cb0-64", "ovs_interfaceid": "00b62cb0-6491-4a57-8252-a869232aff3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.607799] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.608075] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.610188] env[62109]: INFO nova.compute.claims [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1218.075461] env[62109]: DEBUG 
oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "refresh_cache-9548e03e-f51c-4e39-9cc1-27724c2d0961" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.075793] env[62109]: DEBUG nova.compute.manager [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Instance network_info: |[{"id": "00b62cb0-6491-4a57-8252-a869232aff3a", "address": "fa:16:3e:d2:07:a8", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00b62cb0-64", "ovs_interfaceid": "00b62cb0-6491-4a57-8252-a869232aff3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1218.076264] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:07:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbe1725d-6711-4e92-9a4e-d4802651e7d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00b62cb0-6491-4a57-8252-a869232aff3a', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1218.084050] env[62109]: DEBUG oslo.service.loopingcall [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1218.084385] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1218.084640] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0383ce55-d947-4d4b-ab0b-d90ced1d5fea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.105960] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1218.105960] env[62109]: value = "task-1117378" [ 1218.105960] env[62109]: _type = "Task" [ 1218.105960] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.116046] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117378, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.620298] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117378, 'name': CreateVM_Task} progress is 25%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.687117] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb76859f-4469-46c2-92a5-2031411fc478 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.695534] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702383e0-b55a-401f-9c44-8c1c91db3eb1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.726107] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85456f3-a5c2-4d11-b9d6-32f9065a42b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.734527] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4ba877-437f-4177-be1c-3ef31f338a90 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.747661] env[62109]: DEBUG nova.compute.provider_tree [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1219.116387] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117378, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.254017] env[62109]: DEBUG nova.scheduler.client.report [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1219.336671] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "cbc1367e-3d62-4e33-aaad-5112319c1326" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.336671] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.616681] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117378, 'name': CreateVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.758080] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.150s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.758655] env[62109]: DEBUG nova.compute.manager [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1219.838520] env[62109]: DEBUG nova.compute.manager [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1220.117061] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117378, 'name': CreateVM_Task, 'duration_secs': 1.780781} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.117255] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1220.117966] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1220.118160] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.118501] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1220.118783] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f98e0a0-5271-4195-8f68-aaa5998a9407 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.123192] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1220.123192] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525e5697-4f90-ab2f-57e4-5a4929fbbc5b" [ 1220.123192] env[62109]: _type = "Task" [ 1220.123192] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.130667] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525e5697-4f90-ab2f-57e4-5a4929fbbc5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.263908] env[62109]: DEBUG nova.compute.utils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1220.265366] env[62109]: DEBUG nova.compute.manager [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1220.265540] env[62109]: DEBUG nova.network.neutron [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1220.308010] env[62109]: DEBUG nova.policy [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b39ff10ac8bd4e4abf04fd881e5125ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9e5867b8b7e4ed18c5395baf46db66f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1220.367870] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.368150] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.369805] env[62109]: INFO nova.compute.claims [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1220.638949] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]525e5697-4f90-ab2f-57e4-5a4929fbbc5b, 'name': SearchDatastore_Task, 'duration_secs': 0.01147} completed successfully. 
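The Acquiring/acquired/released lines above, with their "waited" and "held" timings, are emitted by oslo_concurrency around named critical sections such as "compute_resources". A hedged usage sketch of that pattern follows; the class and method are illustrative stand-ins, not the nova resource tracker.

from oslo_concurrency import lockutils


class ResourceTracker:
    @lockutils.synchronized("compute_resources")
    def instance_claim(self, instance_uuid):
        # Runs with the "compute_resources" lock held; the library logs
        # the "waited N.NNNs" / "held N.NNNs" pairs seen above around
        # this critical section.
        print(f"claiming resources for {instance_uuid}")


ResourceTracker().instance_claim("cbc1367e-3d62-4e33-aaad-5112319c1326")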
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.639408] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1220.639732] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1220.640368] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1220.640771] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.641652] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1220.641652] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-656eaf31-0b85-458e-a42c-62dc7c061093 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.644433] env[62109]: DEBUG nova.network.neutron [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Successfully created port: b5890440-3f47-469f-beae-cd3ca9b067d6 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1220.646301] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.646843] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.652779] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1220.652959] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1220.653675] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84779e36-f6a6-432d-90ee-0c115f8ba224 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.658529] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1220.658529] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5258d8a3-6a2a-6246-bce8-e918409ec65f" [ 1220.658529] env[62109]: _type = "Task" [ 1220.658529] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.666432] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5258d8a3-6a2a-6246-bce8-e918409ec65f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.768431] env[62109]: DEBUG nova.compute.manager [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1221.170439] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5258d8a3-6a2a-6246-bce8-e918409ec65f, 'name': SearchDatastore_Task, 'duration_secs': 0.008065} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.171248] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c01d1c3-1ebb-40c3-a301-b8a25a72df24 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.176351] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1221.176351] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b1a863-9bae-bae6-3ebf-adcad90842d9" [ 1221.176351] env[62109]: _type = "Task" [ 1221.176351] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.183473] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b1a863-9bae-bae6-3ebf-adcad90842d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.449500] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890dcf0b-acc9-43e3-aac5-1f02b221ba3f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.456921] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e05fad3-21d9-43ba-b580-0b8eb69a99a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.501050] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800d67c1-ab20-482f-93c2-8638c6384bd5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.510410] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c1a6ca-08ff-4638-aa85-5a65b26a9f7f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.525160] env[62109]: DEBUG nova.compute.provider_tree [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1221.646429] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.686862] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b1a863-9bae-bae6-3ebf-adcad90842d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009172} completed successfully. 
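The sequence above (acquire the image-cache lock, MakeDirectory for devstack-image-cache_base, then SearchDatastore_Task for the 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 VMDK) is the cache check that precedes the disk copy. A simplified sketch of that idea using local-filesystem stand-ins; the paths and the fetch_image callable are assumptions, not the driver's datastore API.

import os

CACHE_DIR = "devstack-image-cache_base"


def ensure_cached_image(datastore_root, image_id, fetch_image):
    # Create the cache folder if needed ("Creating directory with path ...").
    cache_dir = os.path.join(datastore_root, CACHE_DIR, image_id)
    os.makedirs(cache_dir, exist_ok=True)
    # Look for the cached VMDK ("SearchDatastore_Task"); fetch only on a
    # miss, so later instances built from the same image reuse this copy.
    vmdk = os.path.join(cache_dir, f"{image_id}.vmdk")
    if not os.path.exists(vmdk):
        fetch_image(vmdk)   # hypothetical download from the image service
    return vmdk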
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.687170] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1221.687440] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 9548e03e-f51c-4e39-9cc1-27724c2d0961/9548e03e-f51c-4e39-9cc1-27724c2d0961.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1221.687702] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4e6b5c4-b663-41c4-aa10-64ece36090b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.693992] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1221.693992] env[62109]: value = "task-1117379" [ 1221.693992] env[62109]: _type = "Task" [ 1221.693992] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.701413] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117379, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.778240] env[62109]: DEBUG nova.compute.manager [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1221.804433] env[62109]: DEBUG nova.virt.hardware [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1221.804681] env[62109]: DEBUG nova.virt.hardware [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1221.804870] env[62109]: DEBUG nova.virt.hardware [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1221.805098] env[62109]: DEBUG nova.virt.hardware [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1221.805263] env[62109]: DEBUG nova.virt.hardware [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1221.805419] env[62109]: DEBUG nova.virt.hardware [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1221.805632] env[62109]: DEBUG nova.virt.hardware [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1221.805795] env[62109]: DEBUG nova.virt.hardware [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1221.805965] env[62109]: DEBUG nova.virt.hardware [None 
req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1221.806152] env[62109]: DEBUG nova.virt.hardware [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1221.806336] env[62109]: DEBUG nova.virt.hardware [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1221.807197] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbe3876-7737-42d5-ac92-55405f3163c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.816643] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e101fec0-e153-445b-a434-8374bc901216 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.031156] env[62109]: DEBUG nova.scheduler.client.report [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1222.052459] env[62109]: DEBUG nova.compute.manager [req-9921c988-5584-4fe8-99f1-41b3a6229fb1 req-57b85c5c-85cd-493f-a4a6-73820abeb885 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Received event network-vif-plugged-b5890440-3f47-469f-beae-cd3ca9b067d6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1222.052459] env[62109]: DEBUG oslo_concurrency.lockutils [req-9921c988-5584-4fe8-99f1-41b3a6229fb1 req-57b85c5c-85cd-493f-a4a6-73820abeb885 service nova] Acquiring lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.052459] env[62109]: DEBUG oslo_concurrency.lockutils [req-9921c988-5584-4fe8-99f1-41b3a6229fb1 req-57b85c5c-85cd-493f-a4a6-73820abeb885 service nova] Lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1222.052760] env[62109]: DEBUG oslo_concurrency.lockutils [req-9921c988-5584-4fe8-99f1-41b3a6229fb1 
req-57b85c5c-85cd-493f-a4a6-73820abeb885 service nova] Lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.053098] env[62109]: DEBUG nova.compute.manager [req-9921c988-5584-4fe8-99f1-41b3a6229fb1 req-57b85c5c-85cd-493f-a4a6-73820abeb885 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] No waiting events found dispatching network-vif-plugged-b5890440-3f47-469f-beae-cd3ca9b067d6 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1222.053419] env[62109]: WARNING nova.compute.manager [req-9921c988-5584-4fe8-99f1-41b3a6229fb1 req-57b85c5c-85cd-493f-a4a6-73820abeb885 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Received unexpected event network-vif-plugged-b5890440-3f47-469f-beae-cd3ca9b067d6 for instance with vm_state building and task_state spawning. [ 1222.140419] env[62109]: DEBUG nova.network.neutron [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Successfully updated port: b5890440-3f47-469f-beae-cd3ca9b067d6 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1222.204410] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117379, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.432793} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.204702] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 9548e03e-f51c-4e39-9cc1-27724c2d0961/9548e03e-f51c-4e39-9cc1-27724c2d0961.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1222.204826] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1222.205129] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3b0d3d6f-6e4c-4014-b510-9a644cadd5f0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.211547] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1222.211547] env[62109]: value = "task-1117380" [ 1222.211547] env[62109]: _type = "Task" [ 1222.211547] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.218799] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117380, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.535621] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.167s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.536173] env[62109]: DEBUG nova.compute.manager [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1222.642593] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1222.644167] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1222.644305] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.644451] env[62109]: DEBUG nova.network.neutron [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1222.722032] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117380, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105231} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.722332] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1222.723089] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbef34e-2d30-4b9b-9c78-eb7c962fc6b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.744689] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 9548e03e-f51c-4e39-9cc1-27724c2d0961/9548e03e-f51c-4e39-9cc1-27724c2d0961.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1222.744915] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52fd4c32-3a3f-4088-8fed-d68fe6e74ff2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.764183] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1222.764183] env[62109]: value = "task-1117381" [ 1222.764183] env[62109]: _type = "Task" [ 1222.764183] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.771072] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117381, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.041686] env[62109]: DEBUG nova.compute.utils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1223.043913] env[62109]: DEBUG nova.compute.manager [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1223.044441] env[62109]: DEBUG nova.network.neutron [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1223.087722] env[62109]: DEBUG nova.policy [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bba19358571a47f3baafbc9662845961', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '430353b9a427408494b462b49f11354a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1223.151800] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.151950] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1223.181285] env[62109]: DEBUG nova.network.neutron [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1223.274020] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117381, 'name': ReconfigVM_Task, 'duration_secs': 0.301482} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.274459] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 9548e03e-f51c-4e39-9cc1-27724c2d0961/9548e03e-f51c-4e39-9cc1-27724c2d0961.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1223.275097] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c1f75136-281c-4c4c-871b-44b1db3d7ae8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.284108] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1223.284108] env[62109]: value = "task-1117382" [ 1223.284108] env[62109]: _type = "Task" [ 1223.284108] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.292366] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117382, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.310327] env[62109]: DEBUG nova.network.neutron [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Updating instance_info_cache with network_info: [{"id": "b5890440-3f47-469f-beae-cd3ca9b067d6", "address": "fa:16:3e:b3:e8:15", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5890440-3f", "ovs_interfaceid": "b5890440-3f47-469f-beae-cd3ca9b067d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.362611] env[62109]: DEBUG nova.network.neutron [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Successfully created port: 
ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1223.550768] env[62109]: DEBUG nova.compute.manager [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1223.654531] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Didn't find any instances for network info cache update. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1223.654759] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.654919] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.655084] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.655219] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1223.795207] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117382, 'name': Rename_Task, 'duration_secs': 0.280069} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.795536] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1223.795890] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca306114-b44f-4093-8c16-8672519c9596 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.802428] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1223.802428] env[62109]: value = "task-1117383" [ 1223.802428] env[62109]: _type = "Task" [ 1223.802428] env[62109]: } to complete. 
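The network flow for instance 88fb92c9 above shows allocate_for_instance() creating port b5890440-3f47-469f-beae-cd3ca9b067d6, Neutron sending a network-vif-plugged external event back, and the compute manager logging it as unexpected because no waiter had been registered while the instance was still building. A simplified sketch of that waiter registry follows; the real nova InstanceEvents machinery is more involved and these names are illustrative.

import threading


class InstanceEvents:
    def __init__(self):
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare_for(self, instance_uuid, event_name):
        # Called before the step that will eventually trigger the event.
        ev = threading.Event()
        self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # Mirrors the WARNING above: the event arrived before any
            # waiter was registered, so it is logged and dropped.
            print(f"unexpected event {event_name} for {instance_uuid}")
            return
        ev.set()             # wake whoever is blocked on ev.wait()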
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.810576] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117383, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.813115] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1223.813416] env[62109]: DEBUG nova.compute.manager [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Instance network_info: |[{"id": "b5890440-3f47-469f-beae-cd3ca9b067d6", "address": "fa:16:3e:b3:e8:15", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5890440-3f", "ovs_interfaceid": "b5890440-3f47-469f-beae-cd3ca9b067d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1223.813794] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:e8:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61b8f0db-488e-42d7-bf6c-6c1665cd5616', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5890440-3f47-469f-beae-cd3ca9b067d6', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1223.821155] env[62109]: DEBUG oslo.service.loopingcall [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
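The oslo.service.loopingcall line above records the build thread blocking until nova.virt.vmwareapi.vm_util.create_vm returns. A hedged sketch of the looping-call pattern oslo_service provides for this kind of wait; the toy _check_done condition and the interval are assumptions, not what nova runs here.

from oslo_service import loopingcall

attempts = {"n": 0}


def _check_done():
    # Toy condition: pretend the resource appears on the third poll.
    attempts["n"] += 1
    if attempts["n"] >= 3:
        raise loopingcall.LoopingCallDone(retvalue="created")


timer = loopingcall.FixedIntervalLoopingCall(_check_done)
result = timer.start(interval=0.1).wait()   # blocks until LoopingCallDone
print(result)                               # "created"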
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1223.821366] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1223.821589] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e41bdda-e31a-49aa-ba3f-3d430f45b055 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.840537] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1223.840537] env[62109]: value = "task-1117384" [ 1223.840537] env[62109]: _type = "Task" [ 1223.840537] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.848402] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117384, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.078626] env[62109]: DEBUG nova.compute.manager [req-7ba1b21b-4123-4d02-bc67-e6792d146012 req-e951d6b2-304c-4f57-94bd-7afa96f566c9 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Received event network-changed-b5890440-3f47-469f-beae-cd3ca9b067d6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1224.079031] env[62109]: DEBUG nova.compute.manager [req-7ba1b21b-4123-4d02-bc67-e6792d146012 req-e951d6b2-304c-4f57-94bd-7afa96f566c9 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Refreshing instance network info cache due to event network-changed-b5890440-3f47-469f-beae-cd3ca9b067d6. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1224.079350] env[62109]: DEBUG oslo_concurrency.lockutils [req-7ba1b21b-4123-4d02-bc67-e6792d146012 req-e951d6b2-304c-4f57-94bd-7afa96f566c9 service nova] Acquiring lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1224.079350] env[62109]: DEBUG oslo_concurrency.lockutils [req-7ba1b21b-4123-4d02-bc67-e6792d146012 req-e951d6b2-304c-4f57-94bd-7afa96f566c9 service nova] Acquired lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.079472] env[62109]: DEBUG nova.network.neutron [req-7ba1b21b-4123-4d02-bc67-e6792d146012 req-e951d6b2-304c-4f57-94bd-7afa96f566c9 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Refreshing network info cache for port b5890440-3f47-469f-beae-cd3ca9b067d6 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1224.312966] env[62109]: DEBUG oslo_vmware.api [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117383, 'name': PowerOnVM_Task, 'duration_secs': 0.463745} completed successfully. 
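Between the disk copy earlier and the PowerOnVM_Task that just completed, instance 9548e03e walks a fixed sequence: copy the cached VMDK into the instance folder, extend the root disk to the flavor's 1 GB (logged as 1048576 KB), attach it via ReconfigVM_Task, rename the VM, and power it on. An illustrative orchestration of those steps follows; every helper on the api object is a hypothetical stand-in for the corresponding vCenter task, not the nova vmwareapi code.

def spawn_from_cache(api, instance_uuid, cached_vmdk, root_gb):
    dst = f"[datastore1] {instance_uuid}/{instance_uuid}.vmdk"
    api.copy_virtual_disk(cached_vmdk, dst)               # CopyVirtualDisk_Task
    api.extend_virtual_disk(dst, root_gb * 1024 * 1024)   # KB; 1 GB -> 1048576
    api.reconfig_vm_attach_disk(instance_uuid, dst)       # ReconfigVM_Task
    api.rename_vm(instance_uuid)                          # Rename_Task
    api.power_on_vm(instance_uuid)                        # PowerOnVM_Task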
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.313225] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1224.313450] env[62109]: INFO nova.compute.manager [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Took 9.09 seconds to spawn the instance on the hypervisor. [ 1224.313634] env[62109]: DEBUG nova.compute.manager [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1224.314394] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72b0d5a-53d2-4599-ae2d-b9ee6c9969c4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.349951] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117384, 'name': CreateVM_Task, 'duration_secs': 0.299277} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.350123] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1224.350812] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1224.350980] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.351392] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1224.352228] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5c79c1d-ecbf-4ccd-b05e-700cb8706f81 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.356976] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1224.356976] env[62109]: 
value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e98f4a-2841-52f5-8da1-6af3eb046d3b" [ 1224.356976] env[62109]: _type = "Task" [ 1224.356976] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.364199] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e98f4a-2841-52f5-8da1-6af3eb046d3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.559701] env[62109]: DEBUG nova.compute.manager [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1224.587349] env[62109]: DEBUG nova.virt.hardware [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1224.587601] env[62109]: DEBUG nova.virt.hardware [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1224.587762] env[62109]: DEBUG nova.virt.hardware [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1224.587948] env[62109]: DEBUG nova.virt.hardware [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1224.588112] env[62109]: DEBUG nova.virt.hardware [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1224.588265] env[62109]: DEBUG nova.virt.hardware [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d 
tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1224.588474] env[62109]: DEBUG nova.virt.hardware [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1224.588649] env[62109]: DEBUG nova.virt.hardware [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1224.588883] env[62109]: DEBUG nova.virt.hardware [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1224.589102] env[62109]: DEBUG nova.virt.hardware [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1224.589313] env[62109]: DEBUG nova.virt.hardware [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1224.590184] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c7fc6d0-d776-4003-a3a4-cdbb734e0f01 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.598638] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ac5a00-df95-4f2c-8f25-9ed7a40e85b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.646763] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.784679] env[62109]: DEBUG nova.network.neutron [req-7ba1b21b-4123-4d02-bc67-e6792d146012 req-e951d6b2-304c-4f57-94bd-7afa96f566c9 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Updated VIF entry in instance network info cache for port b5890440-3f47-469f-beae-cd3ca9b067d6. 
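The nova.virt.hardware lines above show the topology selection for the 1-vCPU m1.nano flavor: with flavor and image limits unset (0:0:0), the limits fall back to 65536 for sockets, cores and threads, and the only possible topology is sockets=1, cores=1, threads=1. A simplified sketch of enumerating candidate topologies follows; the real selection in nova.virt.hardware applies more constraints than this.

from itertools import product


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Every (sockets, cores, threads) factorisation of vcpus that stays
    # within the maxima; compare "Got 1 possible topologies" above.
    found = []
    for s, c, t in product(range(1, vcpus + 1), repeat=3):
        if (s * c * t == vcpus and s <= max_sockets
                and c <= max_cores and t <= max_threads):
            found.append((s, c, t))
    return found


print(possible_topologies(1))   # [(1, 1, 1)] for the m1.nano flavor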
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1224.785064] env[62109]: DEBUG nova.network.neutron [req-7ba1b21b-4123-4d02-bc67-e6792d146012 req-e951d6b2-304c-4f57-94bd-7afa96f566c9 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Updating instance_info_cache with network_info: [{"id": "b5890440-3f47-469f-beae-cd3ca9b067d6", "address": "fa:16:3e:b3:e8:15", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5890440-3f", "ovs_interfaceid": "b5890440-3f47-469f-beae-cd3ca9b067d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.831207] env[62109]: INFO nova.compute.manager [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Took 13.82 seconds to build instance. [ 1224.867890] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52e98f4a-2841-52f5-8da1-6af3eb046d3b, 'name': SearchDatastore_Task, 'duration_secs': 0.009316} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.868241] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1224.868673] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1224.868878] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1224.868878] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.869055] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1224.869338] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5232671d-9e95-41ec-8ccb-2f536de180fb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.877671] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1224.877839] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1224.878547] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20b98024-ea5b-43a7-9d33-c0808bd0218a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.883620] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1224.883620] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520ad449-0f98-17d4-265b-f01ca6847501" [ 1224.883620] env[62109]: _type = "Task" [ 1224.883620] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.891057] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520ad449-0f98-17d4-265b-f01ca6847501, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.150188] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.150500] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.150640] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.150738] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1225.151694] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8e4352-b6d7-4280-8e80-9a87cef03cbc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.160352] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e2c8cc-b81f-4238-971a-20cdf36abb8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.166965] env[62109]: DEBUG nova.network.neutron [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Successfully updated port: 
ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1225.177478] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d541de6a-0f05-4e1d-91ed-06a54b85efe5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.187020] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed65239c-55f1-4812-9654-e10eed8ac454 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.216867] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180986MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1225.217041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.217244] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.287798] env[62109]: DEBUG oslo_concurrency.lockutils [req-7ba1b21b-4123-4d02-bc67-e6792d146012 req-e951d6b2-304c-4f57-94bd-7afa96f566c9 service nova] Releasing lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1225.332965] env[62109]: DEBUG oslo_concurrency.lockutils [None req-59fc14e2-d28d-409b-8bbc-09d394aa91dc tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "9548e03e-f51c-4e39-9cc1-27724c2d0961" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.329s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.395115] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]520ad449-0f98-17d4-265b-f01ca6847501, 'name': SearchDatastore_Task, 'duration_secs': 0.008333} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.396499] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-601ab63b-1787-45fb-84b5-063324d52ac4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.404902] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1225.404902] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ed3838-728e-9993-a713-b9c553da9bb3" [ 1225.404902] env[62109]: _type = "Task" [ 1225.404902] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.418069] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52ed3838-728e-9993-a713-b9c553da9bb3, 'name': SearchDatastore_Task, 'duration_secs': 0.009042} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.418579] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1225.419061] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 88fb92c9-9587-4d2b-b090-451e8f5b93ab/88fb92c9-9587-4d2b-b090-451e8f5b93ab.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1225.419448] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d914bf53-ec3f-4598-a2c7-c65cedb78f86 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.434195] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1225.434195] env[62109]: value = "task-1117385" [ 1225.434195] env[62109]: _type = "Task" [ 1225.434195] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.444222] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117385, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.669416] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1225.669647] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.669647] env[62109]: DEBUG nova.network.neutron [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1225.931685] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "9548e03e-f51c-4e39-9cc1-27724c2d0961" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.932084] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "9548e03e-f51c-4e39-9cc1-27724c2d0961" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.932323] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "9548e03e-f51c-4e39-9cc1-27724c2d0961-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.932610] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "9548e03e-f51c-4e39-9cc1-27724c2d0961-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.932879] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "9548e03e-f51c-4e39-9cc1-27724c2d0961-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.939889] env[62109]: INFO nova.compute.manager [None 
req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Terminating instance [ 1225.942687] env[62109]: DEBUG nova.compute.manager [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1225.942976] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1225.944067] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30087d4d-a383-4b54-8ab1-673b39a4b3ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.952596] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117385, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.424793} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.953356] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] 88fb92c9-9587-4d2b-b090-451e8f5b93ab/88fb92c9-9587-4d2b-b090-451e8f5b93ab.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1225.953665] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1225.953970] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b679364-e5f7-439f-b9ce-32d57589f727 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.959143] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1225.959789] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76f23bbe-6b8a-44d0-a1bc-2013f80f8201 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.965054] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting 
for the task: (returnval){ [ 1225.965054] env[62109]: value = "task-1117386" [ 1225.965054] env[62109]: _type = "Task" [ 1225.965054] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.969288] env[62109]: DEBUG oslo_vmware.api [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1225.969288] env[62109]: value = "task-1117387" [ 1225.969288] env[62109]: _type = "Task" [ 1225.969288] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.975126] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117386, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.980591] env[62109]: DEBUG oslo_vmware.api [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117387, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.110740] env[62109]: DEBUG nova.compute.manager [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Received event network-vif-plugged-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1226.111095] env[62109]: DEBUG oslo_concurrency.lockutils [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] Acquiring lock "cbc1367e-3d62-4e33-aaad-5112319c1326-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.111361] env[62109]: DEBUG oslo_concurrency.lockutils [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.111515] env[62109]: DEBUG oslo_concurrency.lockutils [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.111695] env[62109]: DEBUG nova.compute.manager [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] No waiting events found dispatching network-vif-plugged-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1226.111867] env[62109]: WARNING nova.compute.manager [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] [instance: 
cbc1367e-3d62-4e33-aaad-5112319c1326] Received unexpected event network-vif-plugged-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 for instance with vm_state building and task_state spawning. [ 1226.112050] env[62109]: DEBUG nova.compute.manager [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Received event network-changed-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1226.112213] env[62109]: DEBUG nova.compute.manager [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Refreshing instance network info cache due to event network-changed-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1226.112385] env[62109]: DEBUG oslo_concurrency.lockutils [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] Acquiring lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1226.201291] env[62109]: DEBUG nova.network.neutron [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1226.241799] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.241960] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 8a64a700-e381-49a0-89ae-8a678ed7a4fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.242126] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 9548e03e-f51c-4e39-9cc1-27724c2d0961 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.242257] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance 88fb92c9-9587-4d2b-b090-451e8f5b93ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.242373] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance cbc1367e-3d62-4e33-aaad-5112319c1326 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1226.242545] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1226.242677] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1226.311472] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd849d10-60e4-4cca-9ec5-cdbe59b3146e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.319304] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8176f78a-53d3-4d68-8015-72e90e452ac7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.348948] env[62109]: DEBUG nova.network.neutron [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updating instance_info_cache with network_info: [{"id": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "address": "fa:16:3e:cf:6e:94", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea50f1f4-69", "ovs_interfaceid": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.350664] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041a3c51-4f86-4f35-bfb7-bbce513089aa {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.358766] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e6d16c-21b9-4b78-9b0a-9bede66d4083 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.373151] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1226.480776] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117386, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067689} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.480997] env[62109]: DEBUG oslo_vmware.api [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117387, 'name': PowerOffVM_Task, 'duration_secs': 0.180174} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.481242] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1226.481522] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1226.481687] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1226.482374] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f8216f-9bc6-4d36-9d85-6fa4e2b45d46 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.484536] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32c0c68d-671d-4863-8860-e1154f40f10d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.506334] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 88fb92c9-9587-4d2b-b090-451e8f5b93ab/88fb92c9-9587-4d2b-b090-451e8f5b93ab.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 
1226.506552] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07e04015-240a-4800-bad9-4f3fe175fbfa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.525569] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1226.525569] env[62109]: value = "task-1117389" [ 1226.525569] env[62109]: _type = "Task" [ 1226.525569] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.532884] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117389, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.562580] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1226.562821] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1226.563035] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleting the datastore file [datastore1] 9548e03e-f51c-4e39-9cc1-27724c2d0961 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1226.563334] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98886c8d-9885-4f84-b5fe-78a163765087 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.569130] env[62109]: DEBUG oslo_vmware.api [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1226.569130] env[62109]: value = "task-1117390" [ 1226.569130] env[62109]: _type = "Task" [ 1226.569130] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.576166] env[62109]: DEBUG oslo_vmware.api [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117390, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.854514] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1226.854875] env[62109]: DEBUG nova.compute.manager [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Instance network_info: |[{"id": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "address": "fa:16:3e:cf:6e:94", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea50f1f4-69", "ovs_interfaceid": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1226.855217] env[62109]: DEBUG oslo_concurrency.lockutils [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] Acquired lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.855406] env[62109]: DEBUG nova.network.neutron [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Refreshing network info cache for port ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1226.856679] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:6e:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea50f1f4-6955-463b-8cb2-d2e7ebbb6335', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1226.863935] env[62109]: DEBUG oslo.service.loopingcall [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 
tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1226.866771] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1226.867267] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c85afd4-ea5f-46f4-a84c-9953ac00137f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.882148] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1226.890232] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1226.890232] env[62109]: value = "task-1117391" [ 1226.890232] env[62109]: _type = "Task" [ 1226.890232] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.898109] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117391, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.037752] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117389, 'name': ReconfigVM_Task, 'duration_secs': 0.288467} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.038076] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 88fb92c9-9587-4d2b-b090-451e8f5b93ab/88fb92c9-9587-4d2b-b090-451e8f5b93ab.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1227.038700] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b9309f7-4d52-4947-9c9e-2cc593020f41 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.045063] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1227.045063] env[62109]: value = "task-1117392" [ 1227.045063] env[62109]: _type = "Task" [ 1227.045063] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.053101] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117392, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.073651] env[62109]: DEBUG nova.network.neutron [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updated VIF entry in instance network info cache for port ea50f1f4-6955-463b-8cb2-d2e7ebbb6335. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1227.074018] env[62109]: DEBUG nova.network.neutron [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updating instance_info_cache with network_info: [{"id": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "address": "fa:16:3e:cf:6e:94", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea50f1f4-69", "ovs_interfaceid": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1227.080010] env[62109]: DEBUG oslo_vmware.api [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117390, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144565} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.080255] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1227.080456] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1227.080640] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1227.080815] env[62109]: INFO nova.compute.manager [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1227.081070] env[62109]: DEBUG oslo.service.loopingcall [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1227.081309] env[62109]: DEBUG nova.compute.manager [-] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1227.081402] env[62109]: DEBUG nova.network.neutron [-] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1227.387453] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1227.387855] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.170s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.399720] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117391, 'name': CreateVM_Task, 'duration_secs': 0.349546} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.399880] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1227.400539] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1227.400713] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.401039] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1227.401312] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff4f0abe-82b8-4391-b105-28d4cba6c8ad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.405532] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1227.405532] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529dfbc7-ffdb-3d70-40db-40f544d45a6c" [ 1227.405532] env[62109]: _type = "Task" [ 1227.405532] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.412591] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529dfbc7-ffdb-3d70-40db-40f544d45a6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.555671] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117392, 'name': Rename_Task, 'duration_secs': 0.192131} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.556090] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1227.556380] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c8c33d4-af19-4699-95a2-da1ef2478f4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.563280] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1227.563280] env[62109]: value = "task-1117393" [ 1227.563280] env[62109]: _type = "Task" [ 1227.563280] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.571596] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117393, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.576266] env[62109]: DEBUG oslo_concurrency.lockutils [req-a2d181d0-bfdb-4e9f-941f-76b34495cbbe req-fbe2d06e-f2b8-46de-90bd-807c7b26fdeb service nova] Releasing lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1227.844933] env[62109]: DEBUG nova.network.neutron [-] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1227.917574] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]529dfbc7-ffdb-3d70-40db-40f544d45a6c, 'name': SearchDatastore_Task, 'duration_secs': 0.010158} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.917921] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1227.918197] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1227.918448] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1227.918606] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.918800] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1227.919082] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11c3665a-7470-4841-ace1-5b50288a61de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.927913] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1227.928118] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1227.928835] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-534f806a-f81b-4f51-a0c6-cec5ffed340f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.934338] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1227.934338] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524ab333-b58a-e7e1-4460-fa6c04d0c540" [ 1227.934338] env[62109]: _type = "Task" [ 1227.934338] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.942474] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524ab333-b58a-e7e1-4460-fa6c04d0c540, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.074114] env[62109]: DEBUG oslo_vmware.api [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117393, 'name': PowerOnVM_Task, 'duration_secs': 0.476237} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.074366] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1228.074600] env[62109]: INFO nova.compute.manager [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Took 6.30 seconds to spawn the instance on the hypervisor. [ 1228.074800] env[62109]: DEBUG nova.compute.manager [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1228.075573] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d9e164-f4c2-4af0-a8ea-a31a0a59e8d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.136091] env[62109]: DEBUG nova.compute.manager [req-5e57c5c7-f6e2-41f0-b347-a493dc73482f req-a3304012-a00b-4f5a-878b-488235c9631f service nova] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Received event network-vif-deleted-00b62cb0-6491-4a57-8252-a869232aff3a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1228.348155] env[62109]: INFO nova.compute.manager [-] [instance: 9548e03e-f51c-4e39-9cc1-27724c2d0961] Took 1.27 seconds to deallocate network for instance. 
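[annotation] The entries above (and the task-1117394 entries that follow) trace the VMware image-cache path for instance cbc1367e: take a lock on the cached image under devstack-image-cache_base, run a SearchDatastore_Task to check whether the base VMDK is already there, create the cache directory via FileManager.MakeDirectory if needed, and then copy the cached disk into the instance's own folder. The snippet below is a purely illustrative sketch of that "check the cache, then copy" pattern, not the actual code in nova/virt/vmwareapi/vmops.py; the session object and its file_exists/make_dir/download_image/copy_disk helpers are assumptions standing in for the oslo.vmware plumbing visible in the log.

```python
# Illustrative sketch only: the `session` API used here (file_exists, make_dir,
# download_image, copy_disk) is hypothetical and stands in for the
# oslo.vmware / nova.virt.vmwareapi calls seen in the log above.
from contextlib import contextmanager
from threading import Lock

_locks = {}

@contextmanager
def image_cache_lock(name):
    # Serialize access to one cached image, like the
    # "[datastore1] devstack-image-cache_base/<image-id>" lock in the log.
    lock = _locks.setdefault(name, Lock())
    with lock:
        yield

def fetch_image_if_missing(session, datastore, image_id, instance_id):
    cache_dir = f"[{datastore}] devstack-image-cache_base"
    cached_vmdk = f"{cache_dir}/{image_id}/{image_id}.vmdk"
    target_vmdk = f"[{datastore}] {instance_id}/{instance_id}.vmdk"

    with image_cache_lock(cached_vmdk):
        if not session.file_exists(cached_vmdk):   # SearchDatastore_Task
            session.make_dir(cache_dir)            # FileManager.MakeDirectory
            session.download_image(image_id, cached_vmdk)
    # CopyVirtualDisk_Task: clone the cached base disk into the instance folder.
    session.copy_disk(cached_vmdk, target_vmdk)
    return target_vmdk
```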
[ 1228.445745] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524ab333-b58a-e7e1-4460-fa6c04d0c540, 'name': SearchDatastore_Task, 'duration_secs': 0.009141} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.446552] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8491099c-9f52-4f3b-a35c-f5fc4292ec54 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.452131] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1228.452131] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5287ddef-f3b9-22ff-cf49-d2a441aa3092" [ 1228.452131] env[62109]: _type = "Task" [ 1228.452131] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.459717] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5287ddef-f3b9-22ff-cf49-d2a441aa3092, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.595796] env[62109]: INFO nova.compute.manager [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Took 11.01 seconds to build instance. [ 1228.854547] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.854797] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.855110] env[62109]: DEBUG nova.objects.instance [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lazy-loading 'resources' on Instance uuid 9548e03e-f51c-4e39-9cc1-27724c2d0961 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1228.962726] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5287ddef-f3b9-22ff-cf49-d2a441aa3092, 'name': SearchDatastore_Task, 'duration_secs': 0.009855} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.963020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1228.963294] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] cbc1367e-3d62-4e33-aaad-5112319c1326/cbc1367e-3d62-4e33-aaad-5112319c1326.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1228.963561] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3afddfb6-d1aa-4ff1-a7da-e340b315492b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.970441] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1228.970441] env[62109]: value = "task-1117394" [ 1228.970441] env[62109]: _type = "Task" [ 1228.970441] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.977717] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117394, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.098200] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12929bf4-2336-439a-9c32-8718ced0e33c tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.514s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.382704] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1229.438593] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce55cc25-977b-4628-90c2-684da9595158 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.446861] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d18cb88-bcb9-4f61-9bbe-b6ac494594f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.479966] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eda8da9-92ed-4c40-a656-ab68acf56d1d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.487172] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117394, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467972} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.489128] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore1] cbc1367e-3d62-4e33-aaad-5112319c1326/cbc1367e-3d62-4e33-aaad-5112319c1326.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1229.489347] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1229.489610] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c0bf80e-4860-4d41-afca-612f407ff9f8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.493033] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d025f28-719d-43b7-80ac-0998efc8279c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.506227] env[62109]: DEBUG nova.compute.provider_tree [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1229.508662] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1229.508662] env[62109]: value = "task-1117395" [ 1229.508662] env[62109]: _type = "Task" [ 1229.508662] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.517304] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117395, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.011053] env[62109]: DEBUG nova.scheduler.client.report [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1230.023078] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117395, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.242922} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.023325] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1230.024088] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c921d1f-f85c-4258-984d-f24210fb311e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.046524] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] cbc1367e-3d62-4e33-aaad-5112319c1326/cbc1367e-3d62-4e33-aaad-5112319c1326.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1230.047140] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-055575a3-3da3-4344-9d3f-ca9c30be1e1f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.066478] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1230.066478] env[62109]: value = "task-1117396" [ 1230.066478] env[62109]: _type = "Task" [ 1230.066478] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.074350] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117396, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.163018] env[62109]: DEBUG nova.compute.manager [req-046b870d-f76b-424a-ac7e-987010086b53 req-03b9100b-0007-471d-ae26-62c88e2791b0 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Received event network-changed-b5890440-3f47-469f-beae-cd3ca9b067d6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1230.163235] env[62109]: DEBUG nova.compute.manager [req-046b870d-f76b-424a-ac7e-987010086b53 req-03b9100b-0007-471d-ae26-62c88e2791b0 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Refreshing instance network info cache due to event network-changed-b5890440-3f47-469f-beae-cd3ca9b067d6. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1230.163503] env[62109]: DEBUG oslo_concurrency.lockutils [req-046b870d-f76b-424a-ac7e-987010086b53 req-03b9100b-0007-471d-ae26-62c88e2791b0 service nova] Acquiring lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1230.163706] env[62109]: DEBUG oslo_concurrency.lockutils [req-046b870d-f76b-424a-ac7e-987010086b53 req-03b9100b-0007-471d-ae26-62c88e2791b0 service nova] Acquired lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.163877] env[62109]: DEBUG nova.network.neutron [req-046b870d-f76b-424a-ac7e-987010086b53 req-03b9100b-0007-471d-ae26-62c88e2791b0 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Refreshing network info cache for port b5890440-3f47-469f-beae-cd3ca9b067d6 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1230.519062] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.664s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.540485] env[62109]: INFO nova.scheduler.client.report [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted allocations for instance 9548e03e-f51c-4e39-9cc1-27724c2d0961 [ 1231.334365] env[62109]: DEBUG oslo_concurrency.lockutils [None req-32808ee5-80cb-40c2-af89-5076b72baae0 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "9548e03e-f51c-4e39-9cc1-27724c2d0961" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.402s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.339434] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117396, 'name': ReconfigVM_Task, 'duration_secs': 0.948194} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.339689] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Reconfigured VM instance instance-00000073 to attach disk [datastore1] cbc1367e-3d62-4e33-aaad-5112319c1326/cbc1367e-3d62-4e33-aaad-5112319c1326.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1231.340415] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa709bf2-fe30-45d3-af30-37731ea3b898 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.348035] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1231.348035] env[62109]: value = "task-1117397" [ 1231.348035] env[62109]: _type = "Task" [ 1231.348035] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.355766] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117397, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.542179] env[62109]: DEBUG nova.network.neutron [req-046b870d-f76b-424a-ac7e-987010086b53 req-03b9100b-0007-471d-ae26-62c88e2791b0 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Updated VIF entry in instance network info cache for port b5890440-3f47-469f-beae-cd3ca9b067d6. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1231.542565] env[62109]: DEBUG nova.network.neutron [req-046b870d-f76b-424a-ac7e-987010086b53 req-03b9100b-0007-471d-ae26-62c88e2791b0 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Updating instance_info_cache with network_info: [{"id": "b5890440-3f47-469f-beae-cd3ca9b067d6", "address": "fa:16:3e:b3:e8:15", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5890440-3f", "ovs_interfaceid": "b5890440-3f47-469f-beae-cd3ca9b067d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.858233] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117397, 'name': Rename_Task, 'duration_secs': 0.147531} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.858546] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1231.858807] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d035ccd6-9daa-4fa6-9bfb-4becd48c9c3b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.866110] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1231.866110] env[62109]: value = "task-1117398" [ 1231.866110] env[62109]: _type = "Task" [ 1231.866110] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.876341] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "811ab957-14d4-47d0-a44c-f9f44e166af4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.876573] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "811ab957-14d4-47d0-a44c-f9f44e166af4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.877592] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117398, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.045795] env[62109]: DEBUG oslo_concurrency.lockutils [req-046b870d-f76b-424a-ac7e-987010086b53 req-03b9100b-0007-471d-ae26-62c88e2791b0 service nova] Releasing lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1232.375698] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117398, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.379057] env[62109]: DEBUG nova.compute.manager [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1232.876215] env[62109]: DEBUG oslo_vmware.api [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117398, 'name': PowerOnVM_Task, 'duration_secs': 0.954135} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.876616] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1232.876709] env[62109]: INFO nova.compute.manager [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Took 8.32 seconds to spawn the instance on the hypervisor. 
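[annotation] Every vCenter call in this trace (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same shape: the service call returns a task handle, and oslo_vmware.api polls it ("progress is 0% ... completed successfully") until it reaches a terminal state. Below is a minimal, self-contained sketch of that polling loop; the get_task_state callback and its state names are assumptions for illustration and are not the oslo.vmware implementation, which loops against the vSphere PropertyCollector instead.

```python
# Minimal polling-loop sketch. `get_task_state` is a caller-supplied callback
# (hypothetical, for illustration) returning (state, progress, error), where
# state is one of 'queued', 'running', 'success', 'error'.
import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_state, poll_interval=0.5, timeout=300.0):
    """Poll a task until it succeeds; raise on error or timeout."""
    deadline = time.monotonic() + timeout
    while True:
        state, progress, error = get_task_state()
        print(f"progress is {progress}%")   # mirrors the DEBUG lines in the log
        if state == "success":
            return
        if state == "error":
            raise TaskFailed(error or "task failed")
        if time.monotonic() > deadline:
            raise TaskFailed("timed out waiting for task")
        time.sleep(poll_interval)
```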
[ 1232.876869] env[62109]: DEBUG nova.compute.manager [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1232.877632] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796a1317-626b-4e09-af3b-467060fa146d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.902259] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.902536] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.904097] env[62109]: INFO nova.compute.claims [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1233.396880] env[62109]: INFO nova.compute.manager [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Took 13.05 seconds to build instance. [ 1233.633289] env[62109]: DEBUG nova.compute.manager [req-2bcfd7d3-be7a-4d87-af87-fc26cf04a9b7 req-8dcaf9bf-1d5c-476f-92b8-b3bf128ed431 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Received event network-changed-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1233.633621] env[62109]: DEBUG nova.compute.manager [req-2bcfd7d3-be7a-4d87-af87-fc26cf04a9b7 req-8dcaf9bf-1d5c-476f-92b8-b3bf128ed431 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Refreshing instance network info cache due to event network-changed-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1233.633738] env[62109]: DEBUG oslo_concurrency.lockutils [req-2bcfd7d3-be7a-4d87-af87-fc26cf04a9b7 req-8dcaf9bf-1d5c-476f-92b8-b3bf128ed431 service nova] Acquiring lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1233.633879] env[62109]: DEBUG oslo_concurrency.lockutils [req-2bcfd7d3-be7a-4d87-af87-fc26cf04a9b7 req-8dcaf9bf-1d5c-476f-92b8-b3bf128ed431 service nova] Acquired lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.634057] env[62109]: DEBUG nova.network.neutron [req-2bcfd7d3-be7a-4d87-af87-fc26cf04a9b7 req-8dcaf9bf-1d5c-476f-92b8-b3bf128ed431 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Refreshing network info cache for port ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1233.899498] env[62109]: DEBUG oslo_concurrency.lockutils [None req-65b70be9-6b02-4914-9a7e-9a0d9b397a6d tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.563s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.977958] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb3b9b2-df40-4347-8f68-1c484741cbf9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.985683] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b51578-9358-4396-80a9-73c68bdc8ead {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.015740] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44f703e-f020-42af-8b74-ca78caa4a96e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.022992] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e340ee9-0513-4a2c-97bc-741022cf01af {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.035865] env[62109]: DEBUG nova.compute.provider_tree [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1234.336267] env[62109]: DEBUG nova.network.neutron [req-2bcfd7d3-be7a-4d87-af87-fc26cf04a9b7 req-8dcaf9bf-1d5c-476f-92b8-b3bf128ed431 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updated VIF entry in instance network info cache for port ea50f1f4-6955-463b-8cb2-d2e7ebbb6335. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1234.336817] env[62109]: DEBUG nova.network.neutron [req-2bcfd7d3-be7a-4d87-af87-fc26cf04a9b7 req-8dcaf9bf-1d5c-476f-92b8-b3bf128ed431 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updating instance_info_cache with network_info: [{"id": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "address": "fa:16:3e:cf:6e:94", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea50f1f4-69", "ovs_interfaceid": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.538612] env[62109]: DEBUG nova.scheduler.client.report [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1234.839418] env[62109]: DEBUG oslo_concurrency.lockutils [req-2bcfd7d3-be7a-4d87-af87-fc26cf04a9b7 req-8dcaf9bf-1d5c-476f-92b8-b3bf128ed431 service nova] Releasing lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1235.043920] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.141s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.044443] env[62109]: DEBUG nova.compute.manager [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1235.549801] env[62109]: DEBUG nova.compute.utils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1235.551327] env[62109]: DEBUG nova.compute.manager [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1235.551522] env[62109]: DEBUG nova.network.neutron [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1235.593719] env[62109]: DEBUG nova.policy [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73cb2c252e3f4e68a2767b349e0917e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df642ff4b7f247d09f80b260ed9ef53f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1235.854907] env[62109]: DEBUG nova.network.neutron [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Successfully created port: 61dc28f1-a0f3-4cdc-9261-567463eb808a {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1236.054759] env[62109]: DEBUG nova.compute.manager [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1237.065083] env[62109]: DEBUG nova.compute.manager [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1237.091934] env[62109]: DEBUG nova.virt.hardware [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1237.092228] env[62109]: DEBUG nova.virt.hardware [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1237.092522] env[62109]: DEBUG nova.virt.hardware [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1237.092617] env[62109]: DEBUG nova.virt.hardware [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1237.092716] env[62109]: DEBUG nova.virt.hardware [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1237.092911] env[62109]: DEBUG nova.virt.hardware [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1237.093251] env[62109]: DEBUG nova.virt.hardware [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1237.093491] env[62109]: DEBUG nova.virt.hardware [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1237.093683] env[62109]: DEBUG nova.virt.hardware [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 
tempest-ServersTestJSON-632393265-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1237.093849] env[62109]: DEBUG nova.virt.hardware [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1237.094043] env[62109]: DEBUG nova.virt.hardware [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1237.094917] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e46182-64d2-41af-b3ce-5cfb5080f652 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.103468] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ceda81-48a4-4d78-9816-0b3aa512570e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.222918] env[62109]: DEBUG nova.compute.manager [req-9b066651-cc90-4476-9c5d-33c13a53fb80 req-1c6e5e70-190d-4ffd-85e2-79067203e73b service nova] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Received event network-vif-plugged-61dc28f1-a0f3-4cdc-9261-567463eb808a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1237.223015] env[62109]: DEBUG oslo_concurrency.lockutils [req-9b066651-cc90-4476-9c5d-33c13a53fb80 req-1c6e5e70-190d-4ffd-85e2-79067203e73b service nova] Acquiring lock "811ab957-14d4-47d0-a44c-f9f44e166af4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1237.223208] env[62109]: DEBUG oslo_concurrency.lockutils [req-9b066651-cc90-4476-9c5d-33c13a53fb80 req-1c6e5e70-190d-4ffd-85e2-79067203e73b service nova] Lock "811ab957-14d4-47d0-a44c-f9f44e166af4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.223378] env[62109]: DEBUG oslo_concurrency.lockutils [req-9b066651-cc90-4476-9c5d-33c13a53fb80 req-1c6e5e70-190d-4ffd-85e2-79067203e73b service nova] Lock "811ab957-14d4-47d0-a44c-f9f44e166af4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.223561] env[62109]: DEBUG nova.compute.manager [req-9b066651-cc90-4476-9c5d-33c13a53fb80 req-1c6e5e70-190d-4ffd-85e2-79067203e73b service nova] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] No waiting events found dispatching network-vif-plugged-61dc28f1-a0f3-4cdc-9261-567463eb808a {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1237.223750] env[62109]: WARNING nova.compute.manager [req-9b066651-cc90-4476-9c5d-33c13a53fb80 req-1c6e5e70-190d-4ffd-85e2-79067203e73b service nova] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Received 
unexpected event network-vif-plugged-61dc28f1-a0f3-4cdc-9261-567463eb808a for instance with vm_state building and task_state spawning. [ 1237.776643] env[62109]: DEBUG nova.network.neutron [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Successfully updated port: 61dc28f1-a0f3-4cdc-9261-567463eb808a {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1237.799477] env[62109]: DEBUG nova.compute.manager [req-564121ff-ad7d-48c0-a6ae-9156d877938d req-b29deb37-11ac-4c8b-a82a-dc99c992d039 service nova] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Received event network-changed-61dc28f1-a0f3-4cdc-9261-567463eb808a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1237.799687] env[62109]: DEBUG nova.compute.manager [req-564121ff-ad7d-48c0-a6ae-9156d877938d req-b29deb37-11ac-4c8b-a82a-dc99c992d039 service nova] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Refreshing instance network info cache due to event network-changed-61dc28f1-a0f3-4cdc-9261-567463eb808a. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1237.799878] env[62109]: DEBUG oslo_concurrency.lockutils [req-564121ff-ad7d-48c0-a6ae-9156d877938d req-b29deb37-11ac-4c8b-a82a-dc99c992d039 service nova] Acquiring lock "refresh_cache-811ab957-14d4-47d0-a44c-f9f44e166af4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.800059] env[62109]: DEBUG oslo_concurrency.lockutils [req-564121ff-ad7d-48c0-a6ae-9156d877938d req-b29deb37-11ac-4c8b-a82a-dc99c992d039 service nova] Acquired lock "refresh_cache-811ab957-14d4-47d0-a44c-f9f44e166af4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.800200] env[62109]: DEBUG nova.network.neutron [req-564121ff-ad7d-48c0-a6ae-9156d877938d req-b29deb37-11ac-4c8b-a82a-dc99c992d039 service nova] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Refreshing network info cache for port 61dc28f1-a0f3-4cdc-9261-567463eb808a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1238.279646] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "refresh_cache-811ab957-14d4-47d0-a44c-f9f44e166af4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1238.331113] env[62109]: DEBUG nova.network.neutron [req-564121ff-ad7d-48c0-a6ae-9156d877938d req-b29deb37-11ac-4c8b-a82a-dc99c992d039 service nova] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1238.397678] env[62109]: DEBUG nova.network.neutron [req-564121ff-ad7d-48c0-a6ae-9156d877938d req-b29deb37-11ac-4c8b-a82a-dc99c992d039 service nova] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.900889] env[62109]: DEBUG oslo_concurrency.lockutils [req-564121ff-ad7d-48c0-a6ae-9156d877938d req-b29deb37-11ac-4c8b-a82a-dc99c992d039 service nova] Releasing lock "refresh_cache-811ab957-14d4-47d0-a44c-f9f44e166af4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1238.901275] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "refresh_cache-811ab957-14d4-47d0-a44c-f9f44e166af4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.901447] env[62109]: DEBUG nova.network.neutron [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1239.431910] env[62109]: DEBUG nova.network.neutron [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1239.550832] env[62109]: DEBUG nova.network.neutron [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Updating instance_info_cache with network_info: [{"id": "61dc28f1-a0f3-4cdc-9261-567463eb808a", "address": "fa:16:3e:06:9e:f7", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61dc28f1-a0", "ovs_interfaceid": "61dc28f1-a0f3-4cdc-9261-567463eb808a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.053914] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 
tempest-ServersTestJSON-632393265-project-member] Releasing lock "refresh_cache-811ab957-14d4-47d0-a44c-f9f44e166af4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.054359] env[62109]: DEBUG nova.compute.manager [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Instance network_info: |[{"id": "61dc28f1-a0f3-4cdc-9261-567463eb808a", "address": "fa:16:3e:06:9e:f7", "network": {"id": "46de2dbe-bb22-4927-bf8c-0375f536d173", "bridge": "br-int", "label": "tempest-ServersTestJSON-1883984839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df642ff4b7f247d09f80b260ed9ef53f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61dc28f1-a0", "ovs_interfaceid": "61dc28f1-a0f3-4cdc-9261-567463eb808a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1240.054877] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:9e:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbe1725d-6711-4e92-9a4e-d4802651e7d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61dc28f1-a0f3-4cdc-9261-567463eb808a', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1240.062509] env[62109]: DEBUG oslo.service.loopingcall [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1240.062803] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1240.063110] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57357c3a-e646-4515-b9a5-aa267d5c325e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.082741] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1240.082741] env[62109]: value = "task-1117399" [ 1240.082741] env[62109]: _type = "Task" [ 1240.082741] env[62109]: } to complete. 
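The instance network_info dumped above is a list of VIF dicts. A small, self-contained sketch of pulling out the fields the driver later uses (MAC, fixed IPs, devname, OVS interface id); the dict literal is abbreviated from the log, and the real object in Nova is a NetworkInfo/VIF model rather than a plain dict.

```python
# Minimal sketch: extracting fields from the network_info structure logged above.
network_info = [{
    "id": "61dc28f1-a0f3-4cdc-9261-567463eb808a",
    "address": "fa:16:3e:06:9e:f7",
    "devname": "tap61dc28f1-a0",
    "ovs_interfaceid": "61dc28f1-a0f3-4cdc-9261-567463eb808a",
    "network": {
        "id": "46de2dbe-bb22-4927-bf8c-0375f536d173",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "version": 4},
            "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)
# -> 61dc28f1-... fa:16:3e:06:9e:f7 tap61dc28f1-a0 ['192.168.128.5']
```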
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.090190] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117399, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.594291] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117399, 'name': CreateVM_Task, 'duration_secs': 0.302915} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.594766] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1240.595120] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.595298] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.595646] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1240.595908] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a13b5de-abe3-44d9-8f6b-4545dab4eb37 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.600761] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1240.600761] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5286d8fc-4df2-3ad3-4124-a7e8c095e142" [ 1240.600761] env[62109]: _type = "Task" [ 1240.600761] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.608687] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5286d8fc-4df2-3ad3-4124-a7e8c095e142, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.110552] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5286d8fc-4df2-3ad3-4124-a7e8c095e142, 'name': SearchDatastore_Task, 'duration_secs': 0.009284} completed successfully. 
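The CreateVM_Task and SearchDatastore_Task records above come from oslo.vmware's wait_for_task polling loop. Below is a rough standalone sketch (not Nova code) of driving a vCenter task the same way with oslo.vmware; the host, credentials, and managed object reference are placeholders, and the exact constructor keywords should be checked against the installed oslo.vmware release.

```python
# Rough sketch: create an oslo.vmware session and poll a task to completion,
# mirroring the wait_for_task/_poll_task records above. All values are
# placeholders; verify keyword names against your oslo.vmware version.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    "vc1.example.com",               # vCenter host (placeholder)
    "administrator@vsphere.local",   # username (placeholder)
    "secret",                        # password (placeholder)
    api_retry_count=10,
    task_poll_interval=0.5,          # roughly the poll cadence visible above
)

# Example: power on a VM by managed object reference and block until the
# PowerOnVM_Task finishes (wait_for_task raises if the task errors out).
vm_ref = vim_util.get_moref("vm-12345", "VirtualMachine")   # placeholder moref
task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)   # e.g. "success"
```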
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.110920] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.111225] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1241.111523] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1241.111730] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.111965] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1241.112279] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5368268-fdc4-4dae-ab73-da3b980eea5b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.119830] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1241.120057] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1241.120747] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe70b93c-add7-4bf0-9844-d06f0aa0cb87 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.125588] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1241.125588] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d789e6-695c-d441-5dd4-3364d6e841b7" [ 1241.125588] env[62109]: _type = "Task" [ 1241.125588] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.132655] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d789e6-695c-d441-5dd4-3364d6e841b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.635566] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d789e6-695c-d441-5dd4-3364d6e841b7, 'name': SearchDatastore_Task, 'duration_secs': 0.008178} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.636279] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e37cf44-52fe-470e-b8a3-e4718b9eb1fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.641548] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1241.641548] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b6fd20-498f-2cca-b64f-3ef625e230d8" [ 1241.641548] env[62109]: _type = "Task" [ 1241.641548] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.648807] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b6fd20-498f-2cca-b64f-3ef625e230d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.151935] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52b6fd20-498f-2cca-b64f-3ef625e230d8, 'name': SearchDatastore_Task, 'duration_secs': 0.009058} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.152224] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1242.152486] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 811ab957-14d4-47d0-a44c-f9f44e166af4/811ab957-14d4-47d0-a44c-f9f44e166af4.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1242.152748] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bde094ef-a9f6-4148-8a41-26fc97456983 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.159316] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1242.159316] env[62109]: value = "task-1117400" [ 1242.159316] env[62109]: _type = "Task" [ 1242.159316] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.166597] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117400, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.669678] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117400, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.409168} completed successfully. 
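The CopyVirtualDisk step above uses VMware's bracketed datastore path form, "[datastore2] <dir>/<file>". Nova and oslo.vmware have proper datastore-path helpers for this; the snippet below only demonstrates the string convention, using the image-cache and instance paths from the log.

```python
# Plain-Python illustration of the "[datastore] dir/file" path convention seen
# in the CopyVirtualDisk records above.
def ds_path(datastore: str, *parts: str) -> str:
    return "[%s] %s" % (datastore, "/".join(parts))

def split_ds_path(path: str) -> tuple[str, str]:
    datastore, _, rel = path.partition("] ")
    return datastore.lstrip("["), rel

image = "6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8"
uuid = "811ab957-14d4-47d0-a44c-f9f44e166af4"
src = ds_path("datastore2", "devstack-image-cache_base", image, image + ".vmdk")
dst = ds_path("datastore2", uuid, uuid + ".vmdk")
print(src)                 # [datastore2] devstack-image-cache_base/<image>/<image>.vmdk
print(split_ds_path(dst))  # ('datastore2', '<uuid>/<uuid>.vmdk')
```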
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.670062] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 811ab957-14d4-47d0-a44c-f9f44e166af4/811ab957-14d4-47d0-a44c-f9f44e166af4.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1242.670171] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1242.670418] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2e5da47-05cb-4df2-8f24-a2f6cdf3d06c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.677122] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1242.677122] env[62109]: value = "task-1117401" [ 1242.677122] env[62109]: _type = "Task" [ 1242.677122] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.684970] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117401, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.186614] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117401, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063263} completed successfully. 
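The value 1048576 in the "Extending root virtual disk" record above is the requested root disk size expressed in KiB (the unit the vmwareapi driver uses for this call), so a flavor with a 1 GiB root disk maps to 1 x 1024 x 1024 = 1,048,576 KiB. A one-line check:

```python
# Sanity check for the "Extending root virtual disk to 1048576" record above:
# root_gb expressed in KiB, i.e. root_gb * 1024 * 1024.
root_gb = 1
requested_kib = root_gb * 1024 * 1024
assert requested_kib == 1048576
print(f"{root_gb} GiB root disk -> {requested_kib} KiB")
```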
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.186888] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1243.187687] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc43f3d-8d5f-4166-b4e8-cfa4c9b287d9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.209079] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] 811ab957-14d4-47d0-a44c-f9f44e166af4/811ab957-14d4-47d0-a44c-f9f44e166af4.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1243.209314] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68026baf-180c-49af-a2b9-71882b56f52a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.229577] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1243.229577] env[62109]: value = "task-1117402" [ 1243.229577] env[62109]: _type = "Task" [ 1243.229577] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.237091] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117402, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.739545] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117402, 'name': ReconfigVM_Task, 'duration_secs': 0.32289} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.741067] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Reconfigured VM instance instance-00000074 to attach disk [datastore2] 811ab957-14d4-47d0-a44c-f9f44e166af4/811ab957-14d4-47d0-a44c-f9f44e166af4.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1243.741067] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c1c0c51-3cdb-4c6c-a50b-5ca538eb078a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.748075] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1243.748075] env[62109]: value = "task-1117403" [ 1243.748075] env[62109]: _type = "Task" [ 1243.748075] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.755227] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117403, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.258249] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117403, 'name': Rename_Task, 'duration_secs': 0.152467} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.258493] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1244.258735] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6bbccdc5-3719-4990-add7-d31fdb374f5f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.265451] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1244.265451] env[62109]: value = "task-1117404" [ 1244.265451] env[62109]: _type = "Task" [ 1244.265451] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.272719] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117404, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.775842] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117404, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.275712] env[62109]: DEBUG oslo_vmware.api [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117404, 'name': PowerOnVM_Task, 'duration_secs': 0.75189} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.275952] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1245.276180] env[62109]: INFO nova.compute.manager [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Took 8.21 seconds to spawn the instance on the hypervisor. [ 1245.276385] env[62109]: DEBUG nova.compute.manager [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1245.277136] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95aa250d-687a-46e6-a0c1-6cb22620fd96 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.794198] env[62109]: INFO nova.compute.manager [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Took 12.91 seconds to build instance. 
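The instance finishes building in about 12.9 seconds, and the records that follow show the tempest test stopping it (ComputeManager.stop_instance, then PowerOffVM_Task). A hedged client-side sketch of the same stop request with openstacksdk; the "devstack" cloud name is an assumption about clouds.yaml, and a plain polling loop stands in for the sdk's wait helpers.

```python
# Client-side sketch (not part of the log's code path): stop the server that
# was just built, using openstacksdk.
import time
import openstack

conn = openstack.connect(cloud="devstack")            # assumed clouds.yaml entry
server_id = "811ab957-14d4-47d0-a44c-f9f44e166af4"    # instance from the log

conn.compute.stop_server(server_id)
# Poll until Nova reports SHUTOFF.
while conn.compute.get_server(server_id).status != "SHUTOFF":
    time.sleep(2)
```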
[ 1246.296421] env[62109]: DEBUG oslo_concurrency.lockutils [None req-09ad7fb1-c14c-4acb-89f4-cd29a6afd3ab tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "811ab957-14d4-47d0-a44c-f9f44e166af4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.420s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.308068] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7d819f9d-bb60-4c74-9862-1d6e7023b8de tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "811ab957-14d4-47d0-a44c-f9f44e166af4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.308321] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7d819f9d-bb60-4c74-9862-1d6e7023b8de tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "811ab957-14d4-47d0-a44c-f9f44e166af4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.308513] env[62109]: DEBUG nova.compute.manager [None req-7d819f9d-bb60-4c74-9862-1d6e7023b8de tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1246.309527] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e9dbf5-e6d2-45b8-8867-323835525af1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.316254] env[62109]: DEBUG nova.compute.manager [None req-7d819f9d-bb60-4c74-9862-1d6e7023b8de tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62109) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1246.316804] env[62109]: DEBUG nova.objects.instance [None req-7d819f9d-bb60-4c74-9862-1d6e7023b8de tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lazy-loading 'flavor' on Instance uuid 811ab957-14d4-47d0-a44c-f9f44e166af4 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1246.822491] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d819f9d-bb60-4c74-9862-1d6e7023b8de tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1246.822491] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d679e30-15cb-447e-b42e-711efc945c3c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.829551] env[62109]: DEBUG oslo_vmware.api [None req-7d819f9d-bb60-4c74-9862-1d6e7023b8de tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 
1246.829551] env[62109]: value = "task-1117405" [ 1246.829551] env[62109]: _type = "Task" [ 1246.829551] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.838157] env[62109]: DEBUG oslo_vmware.api [None req-7d819f9d-bb60-4c74-9862-1d6e7023b8de tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117405, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.339548] env[62109]: DEBUG oslo_vmware.api [None req-7d819f9d-bb60-4c74-9862-1d6e7023b8de tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117405, 'name': PowerOffVM_Task, 'duration_secs': 0.198879} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.339766] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d819f9d-bb60-4c74-9862-1d6e7023b8de tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1247.339946] env[62109]: DEBUG nova.compute.manager [None req-7d819f9d-bb60-4c74-9862-1d6e7023b8de tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1247.340670] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3e5c32-628d-4f49-81c3-2323d101cf69 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.852076] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7d819f9d-bb60-4c74-9862-1d6e7023b8de tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "811ab957-14d4-47d0-a44c-f9f44e166af4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.544s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.968644] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "811ab957-14d4-47d0-a44c-f9f44e166af4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.969357] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "811ab957-14d4-47d0-a44c-f9f44e166af4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.969593] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "811ab957-14d4-47d0-a44c-f9f44e166af4-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.969787] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "811ab957-14d4-47d0-a44c-f9f44e166af4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.969961] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "811ab957-14d4-47d0-a44c-f9f44e166af4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.972054] env[62109]: INFO nova.compute.manager [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Terminating instance [ 1248.973806] env[62109]: DEBUG nova.compute.manager [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1248.973997] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1248.974819] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ec1750-0102-4419-a6bc-f4a2cab2adab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.982399] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1248.982624] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-756fc199-9f39-46ca-888d-9bae46ffc315 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.052701] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1249.052945] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Deleting contents of the VM from datastore 
datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1249.053111] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleting the datastore file [datastore2] 811ab957-14d4-47d0-a44c-f9f44e166af4 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1249.053372] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-edd108f6-6412-4b8f-b097-bcca4171cf2d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.059135] env[62109]: DEBUG oslo_vmware.api [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1249.059135] env[62109]: value = "task-1117407" [ 1249.059135] env[62109]: _type = "Task" [ 1249.059135] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.066790] env[62109]: DEBUG oslo_vmware.api [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117407, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.568968] env[62109]: DEBUG oslo_vmware.api [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117407, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122204} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.569632] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1249.569848] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1249.570056] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1249.570243] env[62109]: INFO nova.compute.manager [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1249.570493] env[62109]: DEBUG oslo.service.loopingcall [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
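The "Waiting for function ... _deallocate_network_with_retries to return" record above comes from Nova wrapping its network-deallocation helper in an oslo.service looping call. The exact class Nova uses there may differ; the sketch below just shows the general pattern with FixedIntervalLoopingCall and an illustrative callback.

```python
# Generic oslo.service looping-call sketch, mirroring the pattern behind the
# "Waiting for function ... to return" record above.
from oslo_service import loopingcall

attempts = {"n": 0}

def _poll():
    attempts["n"] += 1
    if attempts["n"] >= 3:                       # pretend the work succeeded
        raise loopingcall.LoopingCallDone(retvalue="done")

timer = loopingcall.FixedIntervalLoopingCall(_poll)
result = timer.start(interval=1.0).wait()        # blocks until LoopingCallDone
print(result)                                     # -> "done"
```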
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1249.570687] env[62109]: DEBUG nova.compute.manager [-] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1249.570781] env[62109]: DEBUG nova.network.neutron [-] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1249.847880] env[62109]: DEBUG nova.compute.manager [req-03866af9-d605-4082-b013-fcf8286c3534 req-f9c80323-1118-40f6-b970-c2ae51b30821 service nova] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Received event network-vif-deleted-61dc28f1-a0f3-4cdc-9261-567463eb808a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1249.848011] env[62109]: INFO nova.compute.manager [req-03866af9-d605-4082-b013-fcf8286c3534 req-f9c80323-1118-40f6-b970-c2ae51b30821 service nova] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Neutron deleted interface 61dc28f1-a0f3-4cdc-9261-567463eb808a; detaching it from the instance and deleting it from the info cache [ 1249.848197] env[62109]: DEBUG nova.network.neutron [req-03866af9-d605-4082-b013-fcf8286c3534 req-f9c80323-1118-40f6-b970-c2ae51b30821 service nova] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.324883] env[62109]: DEBUG nova.network.neutron [-] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.351018] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45320d98-cccb-4fed-b9d2-dbbab4c7c4ab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.361042] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7315c5f-b240-42d5-89e3-37615ebe8e2a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.385883] env[62109]: DEBUG nova.compute.manager [req-03866af9-d605-4082-b013-fcf8286c3534 req-f9c80323-1118-40f6-b970-c2ae51b30821 service nova] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Detach interface failed, port_id=61dc28f1-a0f3-4cdc-9261-567463eb808a, reason: Instance 811ab957-14d4-47d0-a44c-f9f44e166af4 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1250.827798] env[62109]: INFO nova.compute.manager [-] [instance: 811ab957-14d4-47d0-a44c-f9f44e166af4] Took 1.26 seconds to deallocate network for instance. 
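The Acquiring/acquired/released bookkeeping that runs through the records above ("Lock ... acquired ... waited 0.000s", "... released ... held N s") is emitted by oslo.concurrency's lockutils wrappers. A minimal usage sketch of the two common entry points; the lock names are taken from the log but the functions themselves are illustrative.

```python
# Minimal oslo.concurrency lock sketch, mirroring the lockutils records above.
from oslo_concurrency import lockutils

# 1) Context-manager form, e.g. around a network info cache refresh
with lockutils.lock("refresh_cache-811ab957-14d4-47d0-a44c-f9f44e166af4"):
    pass  # guarded work goes here

# 2) Decorator form, e.g. for the "compute_resources" lock
@lockutils.synchronized("compute_resources")
def update_usage():
    pass  # resource tracker work guarded by the lock

update_usage()
```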
[ 1251.333906] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1251.334180] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1251.334411] env[62109]: DEBUG nova.objects.instance [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lazy-loading 'resources' on Instance uuid 811ab957-14d4-47d0-a44c-f9f44e166af4 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1251.909807] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4148e3e4-3207-4d5b-bbbb-b83b0e5d379b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.917346] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3825df00-0428-4e14-b2f9-8ae9fc91fbce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.946819] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0fc480-123a-4476-b353-36ebc1ef81d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.954443] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8efb0fe-659e-414c-b9bf-982f68f288f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.967411] env[62109]: DEBUG nova.compute.provider_tree [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1252.470776] env[62109]: DEBUG nova.scheduler.client.report [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1252.975535] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.641s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.993718] env[62109]: INFO nova.scheduler.client.report [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted allocations for instance 811ab957-14d4-47d0-a44c-f9f44e166af4 [ 1253.501113] env[62109]: DEBUG oslo_concurrency.lockutils [None req-7651f9b7-9ebc-4626-958c-69bdbe3048f8 tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "811ab957-14d4-47d0-a44c-f9f44e166af4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.532s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1254.719794] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.720223] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.720223] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.720309] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.720467] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1254.722705] env[62109]: INFO nova.compute.manager [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Terminating instance [ 1254.724451] env[62109]: DEBUG nova.compute.manager [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 
tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1254.724644] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1254.725468] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25b0880-158e-4c96-b877-3f7e2121108a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.733888] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1254.734120] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff6a0d99-b6a3-42c7-80e0-b73a1d4773c1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.740412] env[62109]: DEBUG oslo_vmware.api [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1254.740412] env[62109]: value = "task-1117408" [ 1254.740412] env[62109]: _type = "Task" [ 1254.740412] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.747902] env[62109]: DEBUG oslo_vmware.api [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117408, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.250947] env[62109]: DEBUG oslo_vmware.api [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117408, 'name': PowerOffVM_Task, 'duration_secs': 0.198943} completed successfully. 
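A few records back, the scheduler report client logged the inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400). Placement's effective schedulable capacity per resource class is (total - reserved) * allocation_ratio; a quick computation over that data:

```python
# Effective capacity implied by the inventory data logged above:
# (total - reserved) * allocation_ratio for each resource class.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```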
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.251206] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1255.251436] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1255.251677] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93666a0c-76a0-471f-bbf6-1777716a53c0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.312500] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1255.312717] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1255.312899] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleting the datastore file [datastore2] 8a64a700-e381-49a0-89ae-8a678ed7a4fb {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1255.313173] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-908a61a7-9349-466b-84a4-3a86ee2744a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.319853] env[62109]: DEBUG oslo_vmware.api [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for the task: (returnval){ [ 1255.319853] env[62109]: value = "task-1117410" [ 1255.319853] env[62109]: _type = "Task" [ 1255.319853] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.327280] env[62109]: DEBUG oslo_vmware.api [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117410, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.830553] env[62109]: DEBUG oslo_vmware.api [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Task: {'id': task-1117410, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13291} completed successfully. 
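The lockutils release records are also handy for quick latency checks, since each one carries "held N.NNNs". A small grep-style helper (plain re over the raw log text) that pulls out lock names, callers, and hold times; the sample string is copied from one of the records above.

```python
# Quick log-analysis helper: extract lock names and hold times from the
# 'Lock "<name>" "released" by "<caller>" :: held N.NNNs' records in this log.
import re

HELD_RE = re.compile(r'Lock "([^"]+)" "released" by "([^"]+)" :: held (\d+\.\d+)s')

sample = ('Lock "811ab957-14d4-47d0-a44c-f9f44e166af4" "released" by '
          '"nova.compute.manager.ComputeManager.stop_instance.'
          '.do_stop_instance" :: held 1.544s')

for name, caller, held in HELD_RE.findall(sample):
    print(f"{name}: held {float(held):.3f}s by {caller}")
```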
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.830949] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1255.831021] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1255.831183] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1255.831371] env[62109]: INFO nova.compute.manager [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1255.831647] env[62109]: DEBUG oslo.service.loopingcall [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1255.831846] env[62109]: DEBUG nova.compute.manager [-] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1255.831940] env[62109]: DEBUG nova.network.neutron [-] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1256.062809] env[62109]: DEBUG nova.compute.manager [req-96ff4971-c918-40bc-893c-32714fae1ece req-defc3929-7e81-4eb1-98c3-bff8148a93ca service nova] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Received event network-vif-deleted-0674b2a4-d58e-4dcb-b770-308e0b503998 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1256.063118] env[62109]: INFO nova.compute.manager [req-96ff4971-c918-40bc-893c-32714fae1ece req-defc3929-7e81-4eb1-98c3-bff8148a93ca service nova] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Neutron deleted interface 0674b2a4-d58e-4dcb-b770-308e0b503998; detaching it from the instance and deleting it from the info cache [ 1256.063365] env[62109]: DEBUG nova.network.neutron [req-96ff4971-c918-40bc-893c-32714fae1ece req-defc3929-7e81-4eb1-98c3-bff8148a93ca service nova] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.536853] env[62109]: DEBUG nova.network.neutron [-] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1256.566380] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8bc09d9-5cf4-4e42-9634-d27c4c08a0fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.577705] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a1fbb5-0fd8-4053-a23e-f185c7743af7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.604498] env[62109]: DEBUG nova.compute.manager [req-96ff4971-c918-40bc-893c-32714fae1ece req-defc3929-7e81-4eb1-98c3-bff8148a93ca service nova] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Detach interface failed, port_id=0674b2a4-d58e-4dcb-b770-308e0b503998, reason: Instance 8a64a700-e381-49a0-89ae-8a678ed7a4fb could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1257.040016] env[62109]: INFO nova.compute.manager [-] [instance: 8a64a700-e381-49a0-89ae-8a678ed7a4fb] Took 1.21 seconds to deallocate network for instance. [ 1257.546709] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1257.547088] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1257.547365] env[62109]: DEBUG nova.objects.instance [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lazy-loading 'resources' on Instance uuid 8a64a700-e381-49a0-89ae-8a678ed7a4fb {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1258.110629] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7777e5-5fd3-41a4-a007-23b0eeea4442 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.117953] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5b680c-b3ef-4e45-ae34-978ee1322683 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.149383] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a9f2f1-8c15-42e3-9256-b6637b428380 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.156519] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8b6926-d876-41fb-92a0-cf5a013a5f0b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.169371] env[62109]: DEBUG nova.compute.provider_tree [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 
tempest-ServersTestJSON-632393265-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1258.672444] env[62109]: DEBUG nova.scheduler.client.report [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1259.178206] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.631s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1259.197952] env[62109]: INFO nova.scheduler.client.report [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Deleted allocations for instance 8a64a700-e381-49a0-89ae-8a678ed7a4fb [ 1259.708299] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5eedb68e-f4b7-4a47-8af7-674948aa6d0f tempest-ServersTestJSON-632393265 tempest-ServersTestJSON-632393265-project-member] Lock "8a64a700-e381-49a0-89ae-8a678ed7a4fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.988s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1265.806704] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2772dfe0-2b05-43c0-8fc7-a33dca121afb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1265.807145] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2772dfe0-2b05-43c0-8fc7-a33dca121afb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.807200] env[62109]: DEBUG nova.compute.manager [None req-2772dfe0-2b05-43c0-8fc7-a33dca121afb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1265.808183] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f1196c-bdf3-4158-84e5-622ee52e0b53 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.814523] env[62109]: DEBUG nova.compute.manager [None req-2772dfe0-2b05-43c0-8fc7-a33dca121afb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62109) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1265.815093] env[62109]: DEBUG nova.objects.instance [None req-2772dfe0-2b05-43c0-8fc7-a33dca121afb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lazy-loading 'flavor' on Instance uuid 88fb92c9-9587-4d2b-b090-451e8f5b93ab {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1266.319723] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-2772dfe0-2b05-43c0-8fc7-a33dca121afb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1266.319976] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57e41761-cbae-44bf-9cb2-6b5d17cb0eec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.327100] env[62109]: DEBUG oslo_vmware.api [None req-2772dfe0-2b05-43c0-8fc7-a33dca121afb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1266.327100] env[62109]: value = "task-1117411" [ 1266.327100] env[62109]: _type = "Task" [ 1266.327100] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.336988] env[62109]: DEBUG oslo_vmware.api [None req-2772dfe0-2b05-43c0-8fc7-a33dca121afb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117411, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.837405] env[62109]: DEBUG oslo_vmware.api [None req-2772dfe0-2b05-43c0-8fc7-a33dca121afb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117411, 'name': PowerOffVM_Task, 'duration_secs': 0.214067} completed successfully. 
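The stop request above runs inside an instance-scoped lock ("Acquiring lock "88fb92c9-…" by …do_stop_instance", then "acquired … waited 0.001s", later "released … held 1.543s"), which is oslo.concurrency's named-lock machinery. A minimal sketch of that pattern, with an illustrative lock name and function rather than Nova's actual decorator stack:

    # Sketch of the named-lock pattern behind the "Acquiring lock ... by ..."
    # entries. Lock name and function body are illustrative only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('88fb92c9-9587-4d2b-b090-451e8f5b93ab')
    def do_stop_instance():
        # Anything taking the same lock name (stop, start, terminate on
        # this instance) is serialized, which is where the "waited N s"
        # and "held N s" figures in the log come from.
        ...

The context-manager form, with lockutils.lock('name'): ..., is equivalent where a decorator is inconvenient.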
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.837848] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-2772dfe0-2b05-43c0-8fc7-a33dca121afb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1266.837848] env[62109]: DEBUG nova.compute.manager [None req-2772dfe0-2b05-43c0-8fc7-a33dca121afb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1266.838588] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04ce33d-95bf-4074-acb8-ef2e5dbf2dcb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.350313] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2772dfe0-2b05-43c0-8fc7-a33dca121afb tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.543s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1268.191585] env[62109]: DEBUG nova.objects.instance [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lazy-loading 'flavor' on Instance uuid 88fb92c9-9587-4d2b-b090-451e8f5b93ab {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1268.696530] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1268.696750] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.696940] env[62109]: DEBUG nova.network.neutron [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1268.697144] env[62109]: DEBUG nova.objects.instance [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lazy-loading 'info_cache' on Instance uuid 88fb92c9-9587-4d2b-b090-451e8f5b93ab {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1269.200541] env[62109]: DEBUG nova.objects.base [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Object 
Instance<88fb92c9-9587-4d2b-b090-451e8f5b93ab> lazy-loaded attributes: flavor,info_cache {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1269.892901] env[62109]: DEBUG nova.network.neutron [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Updating instance_info_cache with network_info: [{"id": "b5890440-3f47-469f-beae-cd3ca9b067d6", "address": "fa:16:3e:b3:e8:15", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5890440-3f", "ovs_interfaceid": "b5890440-3f47-469f-beae-cd3ca9b067d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.396019] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1270.899736] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1270.899997] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e5cbc7c-8281-41f0-af41-ddc10ebd5b19 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.908151] env[62109]: DEBUG oslo_vmware.api [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1270.908151] env[62109]: value = "task-1117412" [ 1270.908151] env[62109]: _type = "Task" [ 1270.908151] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.915593] env[62109]: DEBUG oslo_vmware.api [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117412, 'name': PowerOnVM_Task} progress is 0%. 
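Each "Checking state" entry in this stretch is immediately followed by a PropertyCollector.RetrievePropertiesEx call: the driver reads the VM's runtime.powerState to reconcile it with the database record. A minimal sketch of that read, under the same session/vm_ref assumptions as the earlier sketch:

    # Sketch of the power-state read behind the paired "Checking state" +
    # RetrievePropertiesEx entries. `session` / `vm_ref` as before.
    from oslo_vmware import vim_util

    def get_power_state(session, vm_ref):
        # get_object_property() goes through the PropertyCollector, which
        # is what shows up as RetrievePropertiesEx in the log.
        return session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, vm_ref, 'runtime.powerState')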
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.418283] env[62109]: DEBUG oslo_vmware.api [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117412, 'name': PowerOnVM_Task, 'duration_secs': 0.378304} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.418662] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1271.418780] env[62109]: DEBUG nova.compute.manager [None req-f075c8a8-9cd3-44b3-b5f1-f59289470a40 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1271.419585] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb60665-b294-43cb-9e18-97827760fb3a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.777593] env[62109]: DEBUG oslo_concurrency.lockutils [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "cbc1367e-3d62-4e33-aaad-5112319c1326" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.777881] env[62109]: DEBUG oslo_concurrency.lockutils [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.777985] env[62109]: INFO nova.compute.manager [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Shelving [ 1273.275939] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b122c986-d57d-4d43-a861-601244fea084 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.284294] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1273.284541] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11d44800-0ee7-4131-b033-ce88c31bc442 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1273.287405] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7f919e23-3fe3-4d83-b5ec-350eb6756308 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Suspending the VM {{(pid=62109) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 1273.287972] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-655859f5-dbc6-44d7-b984-ae42e52208d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.292326] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1273.292326] env[62109]: value = "task-1117413" [ 1273.292326] env[62109]: _type = "Task" [ 1273.292326] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.296327] env[62109]: DEBUG oslo_vmware.api [None req-7f919e23-3fe3-4d83-b5ec-350eb6756308 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1273.296327] env[62109]: value = "task-1117414" [ 1273.296327] env[62109]: _type = "Task" [ 1273.296327] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.302010] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117413, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.308973] env[62109]: DEBUG oslo_vmware.api [None req-7f919e23-3fe3-4d83-b5ec-350eb6756308 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117414, 'name': SuspendVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.803391] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117413, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.807791] env[62109]: DEBUG oslo_vmware.api [None req-7f919e23-3fe3-4d83-b5ec-350eb6756308 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117414, 'name': SuspendVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.303929] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117413, 'name': PowerOffVM_Task, 'duration_secs': 0.731313} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.304559] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1274.305336] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0a5928-7a11-4821-922e-8fc8a9159e37 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.310461] env[62109]: DEBUG oslo_vmware.api [None req-7f919e23-3fe3-4d83-b5ec-350eb6756308 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117414, 'name': SuspendVM_Task, 'duration_secs': 0.736444} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.310996] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-7f919e23-3fe3-4d83-b5ec-350eb6756308 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Suspended the VM {{(pid=62109) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 1274.311207] env[62109]: DEBUG nova.compute.manager [None req-7f919e23-3fe3-4d83-b5ec-350eb6756308 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1274.311911] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406d262a-c815-401e-a15f-0841f139bd80 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.334274] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0194624e-123a-43de-baa2-5e40e84220b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.850757] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Creating Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1274.851199] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0c57e8b6-6e7d-4a68-8f10-4d7f275a4a18 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.859854] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1274.859854] env[62109]: value = "task-1117415" [ 1274.859854] env[62109]: _type = "Task" [ 1274.859854] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.868378] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117415, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.371682] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117415, 'name': CreateSnapshot_Task, 'duration_secs': 0.433761} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.371933] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Created Snapshot of the VM instance {{(pid=62109) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1275.372664] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da021eb5-770d-41d5-9925-1babbf47be19 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.630162] env[62109]: INFO nova.compute.manager [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Resuming [ 1275.630781] env[62109]: DEBUG nova.objects.instance [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lazy-loading 'flavor' on Instance uuid 88fb92c9-9587-4d2b-b090-451e8f5b93ab {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1275.889441] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Creating linked-clone VM from snapshot {{(pid=62109) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1275.889804] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6fef28a9-8bad-44bd-932c-50b854dcff45 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.898266] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1275.898266] env[62109]: value = "task-1117416" [ 1275.898266] env[62109]: _type = "Task" [ 1275.898266] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.906267] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117416, 'name': CloneVM_Task} progress is 0%. 
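Shelving cbc1367e-… above follows the driver's usual shape: power the VM off, take a snapshot (CreateSnapshot_Task), then build a linked-clone VM from that snapshot (CloneVM_Task), which is what gets uploaded as the shelved image. A rough sketch of the snapshot step only, with illustrative snapshot name and flags; the linked-clone step needs a full VirtualMachineCloneSpec and is omitted:

    # Sketch of the CreateSnapshot_Task step of the shelve flow above.
    # Snapshot name/flags are illustrative.
    def snapshot_for_shelve(session, vm_ref):
        task = session.invoke_api(
            session.vim, 'CreateSnapshot_Task', vm_ref,
            name='nova-shelve-snapshot', description='',
            memory=False, quiesce=False)
        return session.wait_for_task(task)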
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.408732] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117416, 'name': CloneVM_Task} progress is 94%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.639561] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1276.639805] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquired lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.639855] env[62109]: DEBUG nova.network.neutron [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1276.908794] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117416, 'name': CloneVM_Task, 'duration_secs': 0.989386} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.909179] env[62109]: INFO nova.virt.vmwareapi.vmops [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Created linked-clone VM from snapshot [ 1276.909816] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ae1fac-1f04-49fd-90f8-95eac279821e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.918175] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Uploading image c70fef60-128e-462e-822d-a04d0800cff1 {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1276.946101] env[62109]: DEBUG oslo_vmware.rw_handles [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1276.946101] env[62109]: value = "vm-244579" [ 1276.946101] env[62109]: _type = "VirtualMachine" [ 1276.946101] env[62109]: }. 
{{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1276.946526] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9651152a-f891-4b65-9f92-bfc56a0925d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.956612] env[62109]: DEBUG oslo_vmware.rw_handles [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lease: (returnval){ [ 1276.956612] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524e7915-5324-dce4-561e-f23b3a636c0b" [ 1276.956612] env[62109]: _type = "HttpNfcLease" [ 1276.956612] env[62109]: } obtained for exporting VM: (result){ [ 1276.956612] env[62109]: value = "vm-244579" [ 1276.956612] env[62109]: _type = "VirtualMachine" [ 1276.956612] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1276.957087] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the lease: (returnval){ [ 1276.957087] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524e7915-5324-dce4-561e-f23b3a636c0b" [ 1276.957087] env[62109]: _type = "HttpNfcLease" [ 1276.957087] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1276.963731] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1276.963731] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524e7915-5324-dce4-561e-f23b3a636c0b" [ 1276.963731] env[62109]: _type = "HttpNfcLease" [ 1276.963731] env[62109]: } is initializing. 
{{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1277.361722] env[62109]: DEBUG nova.network.neutron [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Updating instance_info_cache with network_info: [{"id": "b5890440-3f47-469f-beae-cd3ca9b067d6", "address": "fa:16:3e:b3:e8:15", "network": {"id": "d3eaeb90-2414-45c1-9732-4ec852512f2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-980789390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9e5867b8b7e4ed18c5395baf46db66f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5890440-3f", "ovs_interfaceid": "b5890440-3f47-469f-beae-cd3ca9b067d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.465205] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1277.465205] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524e7915-5324-dce4-561e-f23b3a636c0b" [ 1277.465205] env[62109]: _type = "HttpNfcLease" [ 1277.465205] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1277.465550] env[62109]: DEBUG oslo_vmware.rw_handles [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1277.465550] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]524e7915-5324-dce4-561e-f23b3a636c0b" [ 1277.465550] env[62109]: _type = "HttpNfcLease" [ 1277.465550] env[62109]: }. {{(pid=62109) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1277.466412] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b91478-2409-4420-a1e9-d9db525fd159 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.474124] env[62109]: DEBUG oslo_vmware.rw_handles [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52696843-575f-1e74-2c90-300beed8d441/disk-0.vmdk from lease info. 
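The image upload above is driven by an HttpNfcLease: ExportVm returns a lease, the driver waits for it to become ready, then reads the disk's device URL out of the lease info (the "Found VMDK URL … disk-0.vmdk" entry), keeping the lease alive with HttpNfcLeaseProgress while the bytes stream out. A minimal sketch of the lease negotiation only, under the same session assumption; streaming the VMDK and the progress keep-alives are not shown:

    # Sketch of the HttpNfcLease export negotiation logged above.
    from oslo_vmware import vim_util

    def get_export_vmdk_url(session, vm_ref):
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        session.wait_for_lease_ready(lease)
        info = session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, lease, 'info')
        # Pick the device URL that points at a .vmdk (compare the
        # "Found VMDK URL ... disk-0.vmdk" entry above).
        for device_url in info.deviceUrl:
            if device_url.url.endswith('.vmdk'):
                return device_url.url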
{{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1277.474274] env[62109]: DEBUG oslo_vmware.rw_handles [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52696843-575f-1e74-2c90-300beed8d441/disk-0.vmdk for reading. {{(pid=62109) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1277.565622] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6a5aa8b0-b240-475a-9615-2beaef68a0b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.865818] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Releasing lock "refresh_cache-88fb92c9-9587-4d2b-b090-451e8f5b93ab" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1277.866853] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b1cbbd-a03c-4270-8738-524e2630e3aa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.874852] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Resuming the VM {{(pid=62109) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1184}} [ 1277.875246] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b1290a0-e8c2-4ce0-9c65-38c4e3ffff50 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.883189] env[62109]: DEBUG oslo_vmware.api [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1277.883189] env[62109]: value = "task-1117418" [ 1277.883189] env[62109]: _type = "Task" [ 1277.883189] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.893358] env[62109]: DEBUG oslo_vmware.api [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117418, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.393937] env[62109]: DEBUG oslo_vmware.api [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117418, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.895660] env[62109]: DEBUG oslo_vmware.api [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117418, 'name': PowerOnVM_Task, 'duration_secs': 0.542538} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.895874] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Resumed the VM {{(pid=62109) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1189}} [ 1278.896166] env[62109]: DEBUG nova.compute.manager [None req-aedf9733-ae21-42fe-a824-28d6e549b677 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1278.897038] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642178df-b836-4ae1-bd1c-808912491872 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.338755] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.339169] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.339333] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.339654] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.339949] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.342270] env[62109]: INFO nova.compute.manager [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Terminating instance [ 1280.344111] env[62109]: DEBUG nova.compute.manager [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1280.344312] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1280.345156] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13c3e85-dfec-4ba1-9e27-68f9ebba2cc2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.352722] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1280.352977] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c735ff4e-e05a-412b-99fa-5dfbc1e68d15 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.361163] env[62109]: DEBUG oslo_vmware.api [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1280.361163] env[62109]: value = "task-1117419" [ 1280.361163] env[62109]: _type = "Task" [ 1280.361163] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.369344] env[62109]: DEBUG oslo_vmware.api [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117419, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.646543] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.871786] env[62109]: DEBUG oslo_vmware.api [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117419, 'name': PowerOffVM_Task, 'duration_secs': 0.219962} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.872113] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1280.872339] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1280.872613] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e1e0ebc-f8c2-4bbe-8d4d-83ef997b4bd1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.947270] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1280.947511] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1280.947709] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleting the datastore file [datastore1] 88fb92c9-9587-4d2b-b090-451e8f5b93ab {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1280.948015] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88bea535-428b-4d67-8cbb-bca5c8889dd8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.954685] env[62109]: DEBUG oslo_vmware.api [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for the task: (returnval){ [ 1280.954685] env[62109]: value = "task-1117421" [ 1280.954685] env[62109]: _type = "Task" [ 1280.954685] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.962668] env[62109]: DEBUG oslo_vmware.api [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117421, 'name': DeleteDatastoreFile_Task} progress is 0%. 
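The "Running periodic task ComputeManager._poll_rebooting_instances" entry a little above (and the _poll_rescued_instances, _poll_unconfirmed_resizes, _poll_volume_usage and _heal_instance_info_cache runs further down) come from oslo.service's periodic-task machinery. A minimal sketch of how such a task is declared, with an illustrative manager class and spacing:

    # Sketch of an oslo.service periodic task; the class name and spacing
    # are illustrative, not Nova's actual values.
    from oslo_service import periodic_task

    class ExampleManager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60)
        def _poll_something(self, context):
            # Invoked roughly every `spacing` seconds once the manager's
            # periodic loop is started; each run logs a
            # "Running periodic task ..." line like the ones here.
            pass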
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.463863] env[62109]: DEBUG oslo_vmware.api [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Task: {'id': task-1117421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.464270] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1281.464380] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1281.464560] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1281.464740] env[62109]: INFO nova.compute.manager [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1281.464991] env[62109]: DEBUG oslo.service.loopingcall [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
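The "Waiting for function … _deallocate_network_with_retries to return" entry above is oslo.service's looping-call helper wrapping the Neutron teardown so transient failures can be retried; the exact call variant and retry policy are not visible in the log. A generic sketch of the looping-call pattern, with an illustrative interval and a wrapped function that stops after one attempt:

    # Generic sketch of an oslo.service looping call, the mechanism behind
    # the "Waiting for function ... to return" entry. The interval and the
    # wrapped function are illustrative.
    from oslo_service import loopingcall

    def _deallocate_once():
        # One teardown attempt; raising LoopingCallDone ends the loop
        # (a real retry wrapper would only do this on success).
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_once)
    timer.start(interval=1).wait()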
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1281.465201] env[62109]: DEBUG nova.compute.manager [-] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1281.465297] env[62109]: DEBUG nova.network.neutron [-] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1281.930919] env[62109]: DEBUG nova.compute.manager [req-da447b54-7b79-4630-9d26-cad6fda7604e req-88d9b914-0b0b-4ef6-b4bb-f9dd544a73e6 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Received event network-vif-deleted-b5890440-3f47-469f-beae-cd3ca9b067d6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1281.931082] env[62109]: INFO nova.compute.manager [req-da447b54-7b79-4630-9d26-cad6fda7604e req-88d9b914-0b0b-4ef6-b4bb-f9dd544a73e6 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Neutron deleted interface b5890440-3f47-469f-beae-cd3ca9b067d6; detaching it from the instance and deleting it from the info cache [ 1281.931248] env[62109]: DEBUG nova.network.neutron [req-da447b54-7b79-4630-9d26-cad6fda7604e req-88d9b914-0b0b-4ef6-b4bb-f9dd544a73e6 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.406617] env[62109]: DEBUG nova.network.neutron [-] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.434185] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33e594d6-22d0-4c9f-8817-0cd563c700fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.443797] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed27b2bd-54c3-4730-a3cd-7da513322fe9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.470069] env[62109]: DEBUG nova.compute.manager [req-da447b54-7b79-4630-9d26-cad6fda7604e req-88d9b914-0b0b-4ef6-b4bb-f9dd544a73e6 service nova] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Detach interface failed, port_id=b5890440-3f47-469f-beae-cd3ca9b067d6, reason: Instance 88fb92c9-9587-4d2b-b090-451e8f5b93ab could not be found. 
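When the resource tracker updates usage after this teardown (the compute_resources lock and provider 574e9717-… inventory entries that follow), it re-reports the same inventory already logged earlier in this section. For each resource class, placement's schedulable capacity is (total - reserved) * allocation_ratio; a small worked check of the reported numbers:

    # Worked check of the provider inventory reported in this section:
    # schedulable capacity per class is (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0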
{{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1282.646729] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1282.646965] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1282.647148] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1282.909406] env[62109]: INFO nova.compute.manager [-] [instance: 88fb92c9-9587-4d2b-b090-451e8f5b93ab] Took 1.44 seconds to deallocate network for instance. [ 1283.417516] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.417762] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.417981] env[62109]: DEBUG nova.objects.instance [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lazy-loading 'resources' on Instance uuid 88fb92c9-9587-4d2b-b090-451e8f5b93ab {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1283.979400] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3964e655-6b33-4eca-aa35-6ee89c894a93 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.987746] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cd59f0-4cf2-4690-acdc-eb03a5d942ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.020157] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84f7ebe-390f-479e-8733-bcc612c338c1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.028041] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536d378c-6c33-410e-bb85-441596e2b253 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.042363] env[62109]: DEBUG nova.compute.provider_tree [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 
tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1284.545754] env[62109]: DEBUG nova.scheduler.client.report [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1284.646679] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1284.646863] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1284.646984] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Rebuilding the list of instances to heal {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1285.051556] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.634s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1285.076461] env[62109]: INFO nova.scheduler.client.report [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Deleted allocations for instance 88fb92c9-9587-4d2b-b090-451e8f5b93ab [ 1285.167144] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "refresh_cache-dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1285.167306] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquired lock "refresh_cache-dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.167456] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Forcefully refreshing network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1285.167613] env[62109]: DEBUG nova.objects.instance [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lazy-loading 'info_cache' on Instance uuid 
dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1285.585261] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1c9107d6-359a-4421-b90e-0fb3cafe14a9 tempest-ServerActionsTestJSON-811768397 tempest-ServerActionsTestJSON-811768397-project-member] Lock "88fb92c9-9587-4d2b-b090-451e8f5b93ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.246s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1286.026846] env[62109]: DEBUG oslo_vmware.rw_handles [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52696843-575f-1e74-2c90-300beed8d441/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1286.027940] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6ca23b-d0f7-4a16-bf3a-dac2433c6613 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.034826] env[62109]: DEBUG oslo_vmware.rw_handles [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52696843-575f-1e74-2c90-300beed8d441/disk-0.vmdk is in state: ready. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1286.034941] env[62109]: ERROR oslo_vmware.rw_handles [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52696843-575f-1e74-2c90-300beed8d441/disk-0.vmdk due to incomplete transfer. [ 1286.035162] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-eaf3f830-3f6e-40ac-8d18-e5027f7b9d93 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.042572] env[62109]: DEBUG oslo_vmware.rw_handles [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52696843-575f-1e74-2c90-300beed8d441/disk-0.vmdk. 
{{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1286.042717] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Uploaded image c70fef60-128e-462e-822d-a04d0800cff1 to the Glance image server {{(pid=62109) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1286.044961] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Destroying the VM {{(pid=62109) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1286.045205] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b2e2cbd4-b4f7-4eec-800b-70faee091496 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.050446] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1286.050446] env[62109]: value = "task-1117422" [ 1286.050446] env[62109]: _type = "Task" [ 1286.050446] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.057952] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117422, 'name': Destroy_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.193728] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1286.561327] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117422, 'name': Destroy_Task, 'duration_secs': 0.317981} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.561709] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Destroyed the VM [ 1286.562040] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Deleting Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1286.562382] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5cc3644b-a0a9-47bb-8b3e-c601b6e4b730 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.569849] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1286.569849] env[62109]: value = "task-1117423" [ 1286.569849] env[62109]: _type = "Task" [ 1286.569849] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.579855] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117423, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.775693] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.080973] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117423, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.277938] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Releasing lock "refresh_cache-dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1287.278186] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9] Updated the network info_cache for instance {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1287.278413] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.278576] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.278706] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1287.278847] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.581877] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117423, 'name': RemoveSnapshot_Task, 'duration_secs': 0.83828} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.582245] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Deleted Snapshot of the VM instance {{(pid=62109) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1287.582611] env[62109]: DEBUG nova.compute.manager [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1287.583495] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df275b4-0de9-4bd8-ba4c-8d25a7446d0e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.781833] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.782097] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.782268] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.782548] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1287.783364] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3590d3-c129-4da9-9ff1-92091916931f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.791728] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ed363d-3b92-4e8d-b8e2-541cd28ba18f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.806520] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a6fdec-b894-47a3-9349-dab0c762157e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.814582] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ccfb2d-ed9a-4c56-8837-6277d914c128 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.844217] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181006MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1287.844370] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.844564] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.094783] env[62109]: INFO nova.compute.manager [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Shelve offloading [ 1288.096458] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1288.096712] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1bd9230d-8bc9-4f9b-bde8-c56c51ddb2e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.104516] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1288.104516] env[62109]: value = "task-1117424" [ 1288.104516] env[62109]: _type = "Task" [ 1288.104516] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.112162] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117424, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.617900] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] VM already powered off {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1288.618174] env[62109]: DEBUG nova.compute.manager [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1288.618918] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e316c1-3e83-4dc1-9ca1-3eeeda5d69c8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.624894] env[62109]: DEBUG oslo_concurrency.lockutils [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1288.625089] env[62109]: DEBUG oslo_concurrency.lockutils [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.625392] env[62109]: DEBUG nova.network.neutron [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1288.869656] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.869819] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance cbc1367e-3d62-4e33-aaad-5112319c1326 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.870011] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1288.870158] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1288.912831] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e54bf2-7682-42fd-9c60-37bb7c1c42a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.920401] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7610e085-ffba-4e13-b171-5203b1b41626 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.950537] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c963b845-72c1-43e8-a23d-7993fba378e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.958155] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a823a3cc-0e26-4d37-9021-6d08d300d0b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.972594] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1289.354455] env[62109]: DEBUG nova.network.neutron [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updating instance_info_cache with network_info: [{"id": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "address": "fa:16:3e:cf:6e:94", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea50f1f4-69", "ovs_interfaceid": 
"ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.474416] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1289.856618] env[62109]: DEBUG oslo_concurrency.lockutils [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.980091] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1289.980274] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.136s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.110177] env[62109]: DEBUG nova.compute.manager [req-4891e8f8-cc8c-4334-95a2-aa691acd9d0e req-4fec63bf-2faf-4344-b4e8-145843470e98 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Received event network-vif-unplugged-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1290.110406] env[62109]: DEBUG oslo_concurrency.lockutils [req-4891e8f8-cc8c-4334-95a2-aa691acd9d0e req-4fec63bf-2faf-4344-b4e8-145843470e98 service nova] Acquiring lock "cbc1367e-3d62-4e33-aaad-5112319c1326-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.110615] env[62109]: DEBUG oslo_concurrency.lockutils [req-4891e8f8-cc8c-4334-95a2-aa691acd9d0e req-4fec63bf-2faf-4344-b4e8-145843470e98 service nova] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.110790] env[62109]: DEBUG oslo_concurrency.lockutils [req-4891e8f8-cc8c-4334-95a2-aa691acd9d0e req-4fec63bf-2faf-4344-b4e8-145843470e98 service nova] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.110980] env[62109]: DEBUG nova.compute.manager [req-4891e8f8-cc8c-4334-95a2-aa691acd9d0e req-4fec63bf-2faf-4344-b4e8-145843470e98 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] No waiting events found dispatching network-vif-unplugged-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1290.111151] env[62109]: WARNING nova.compute.manager [req-4891e8f8-cc8c-4334-95a2-aa691acd9d0e req-4fec63bf-2faf-4344-b4e8-145843470e98 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Received unexpected event network-vif-unplugged-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 for instance with vm_state shelved and task_state shelving_offloading. [ 1290.199748] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1290.200706] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04da0d5f-3c44-4087-90bb-7819303010f5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.208895] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1290.209156] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-027be2ef-c03f-45d1-a348-147bdf56b480 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.714928] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1291.715261] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1291.717014] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleting the datastore file [datastore1] cbc1367e-3d62-4e33-aaad-5112319c1326 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1291.717014] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0b531f1-db52-48cc-93fa-839c725b3104 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.721967] env[62109]: DEBUG oslo_vmware.api [None 
req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1291.721967] env[62109]: value = "task-1117426" [ 1291.721967] env[62109]: _type = "Task" [ 1291.721967] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.730606] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117426, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.975660] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.137194] env[62109]: DEBUG nova.compute.manager [req-a9aa4a13-a1a3-4706-9d71-fe46d665eaf0 req-7b9c674c-dd50-49cb-9458-11f2fdad022f service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Received event network-changed-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1292.137194] env[62109]: DEBUG nova.compute.manager [req-a9aa4a13-a1a3-4706-9d71-fe46d665eaf0 req-7b9c674c-dd50-49cb-9458-11f2fdad022f service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Refreshing instance network info cache due to event network-changed-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1292.137344] env[62109]: DEBUG oslo_concurrency.lockutils [req-a9aa4a13-a1a3-4706-9d71-fe46d665eaf0 req-7b9c674c-dd50-49cb-9458-11f2fdad022f service nova] Acquiring lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1292.137509] env[62109]: DEBUG oslo_concurrency.lockutils [req-a9aa4a13-a1a3-4706-9d71-fe46d665eaf0 req-7b9c674c-dd50-49cb-9458-11f2fdad022f service nova] Acquired lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.137678] env[62109]: DEBUG nova.network.neutron [req-a9aa4a13-a1a3-4706-9d71-fe46d665eaf0 req-7b9c674c-dd50-49cb-9458-11f2fdad022f service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Refreshing network info cache for port ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1292.229920] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Acquiring lock "9ea97120-d2f5-43e9-a929-8f416f735268" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.230181] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 
tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Lock "9ea97120-d2f5-43e9-a929-8f416f735268" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.237692] env[62109]: DEBUG oslo_vmware.api [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117426, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151662} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.237937] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1292.238148] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1292.238328] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1292.256574] env[62109]: INFO nova.scheduler.client.report [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleted allocations for instance cbc1367e-3d62-4e33-aaad-5112319c1326 [ 1292.735346] env[62109]: DEBUG nova.compute.manager [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1292.760543] env[62109]: DEBUG oslo_concurrency.lockutils [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.760827] env[62109]: DEBUG oslo_concurrency.lockutils [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.761077] env[62109]: DEBUG nova.objects.instance [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lazy-loading 'resources' on Instance uuid cbc1367e-3d62-4e33-aaad-5112319c1326 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1292.894217] env[62109]: DEBUG nova.network.neutron [req-a9aa4a13-a1a3-4706-9d71-fe46d665eaf0 req-7b9c674c-dd50-49cb-9458-11f2fdad022f service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updated VIF entry in instance network info cache for port ea50f1f4-6955-463b-8cb2-d2e7ebbb6335. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1292.894319] env[62109]: DEBUG nova.network.neutron [req-a9aa4a13-a1a3-4706-9d71-fe46d665eaf0 req-7b9c674c-dd50-49cb-9458-11f2fdad022f service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updating instance_info_cache with network_info: [{"id": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "address": "fa:16:3e:cf:6e:94", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapea50f1f4-69", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.257628] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.266752] env[62109]: DEBUG nova.objects.instance [None 
req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lazy-loading 'numa_topology' on Instance uuid cbc1367e-3d62-4e33-aaad-5112319c1326 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1293.397577] env[62109]: DEBUG oslo_concurrency.lockutils [req-a9aa4a13-a1a3-4706-9d71-fe46d665eaf0 req-7b9c674c-dd50-49cb-9458-11f2fdad022f service nova] Releasing lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1293.769646] env[62109]: DEBUG nova.objects.base [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62109) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1293.810349] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1d1231-992c-4613-8069-e9dcdf300b03 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.818687] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298da1db-a4fc-4728-8b4e-2e8180b3c229 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.849567] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005ba96d-94e3-4237-b229-3c041f1e3b56 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.859426] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83ed7ce-e990-4476-ba77-aa039d6af2b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.874924] env[62109]: DEBUG nova.compute.provider_tree [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1294.378252] env[62109]: DEBUG nova.scheduler.client.report [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1294.883168] env[62109]: DEBUG oslo_concurrency.lockutils [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.122s {{(pid=62109) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1294.885755] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.628s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.887207] env[62109]: INFO nova.compute.claims [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1294.999145] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "cbc1367e-3d62-4e33-aaad-5112319c1326" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.396262] env[62109]: DEBUG oslo_concurrency.lockutils [None req-116e7479-fa11-4818-ac02-58f05cf71f7e tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.618s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.397172] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.398s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.397332] env[62109]: INFO nova.compute.manager [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Unshelving [ 1295.952436] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6d43ca-538e-4943-8a4e-890bca936b19 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.960200] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea3e0f9-3626-4a20-b6c1-cb8422e5d3a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.990007] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d969e14d-a519-45b4-b680-ca05e9c2efdb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.997373] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b57e49-6ff9-4151-b135-5a1689bc5413 {{(pid=62109) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.010612] env[62109]: DEBUG nova.compute.provider_tree [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1296.421887] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.513400] env[62109]: DEBUG nova.scheduler.client.report [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1297.018990] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.133s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.019309] env[62109]: DEBUG nova.compute.manager [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1297.021877] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.600s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.022092] env[62109]: DEBUG nova.objects.instance [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lazy-loading 'pci_requests' on Instance uuid cbc1367e-3d62-4e33-aaad-5112319c1326 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1297.524627] env[62109]: DEBUG nova.compute.utils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1297.527352] env[62109]: DEBUG nova.objects.instance [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lazy-loading 'numa_topology' on Instance uuid cbc1367e-3d62-4e33-aaad-5112319c1326 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1297.528426] env[62109]: DEBUG nova.compute.manager [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1297.528593] env[62109]: DEBUG nova.network.neutron [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1297.565902] env[62109]: DEBUG nova.policy [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '75cd946987cb4bb38cb411a0ac53acd9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4692d9c2f59c45389eac0d2572856750', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:201}} [ 1297.801949] env[62109]: DEBUG nova.network.neutron [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Successfully created port: 3996353f-47fa-4159-a6b8-4f78c5718559 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1298.030102] env[62109]: DEBUG nova.compute.manager [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1298.034417] env[62109]: INFO nova.compute.claims [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1299.044414] env[62109]: DEBUG nova.compute.manager [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1299.074332] env[62109]: DEBUG nova.virt.hardware [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T07:50:16Z,direct_url=,disk_format='vmdk',id=6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2d2be1e2322b4c87945fff0cd79d3c7b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T07:50:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1299.074506] env[62109]: DEBUG nova.virt.hardware [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1299.074674] env[62109]: DEBUG nova.virt.hardware [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1299.074866] env[62109]: DEBUG nova.virt.hardware [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1299.075034] env[62109]: DEBUG nova.virt.hardware [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1299.075198] env[62109]: DEBUG nova.virt.hardware [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1299.075418] env[62109]: DEBUG nova.virt.hardware [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1299.075567] env[62109]: DEBUG nova.virt.hardware [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 
tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1299.075731] env[62109]: DEBUG nova.virt.hardware [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1299.075897] env[62109]: DEBUG nova.virt.hardware [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1299.076096] env[62109]: DEBUG nova.virt.hardware [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1299.076983] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527e80c0-be9b-470f-8108-80133ee7d218 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.086984] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497fa3cd-9523-41fe-9781-a21eb18a1afe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.103785] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60293d8-86c7-4f49-a54d-dacfc1ed30a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.110055] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa397611-dfbb-430c-a44e-d98184662a27 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.139115] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c352406-b50f-4003-9003-ecc7b9a9d836 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.146159] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55037d4a-19ac-491a-b6f4-3b8ae133f13f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.160009] env[62109]: DEBUG nova.compute.provider_tree [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1299.238491] env[62109]: DEBUG nova.compute.manager [req-898be302-4b21-4a71-b870-91033398c51f req-1408d720-64af-43e6-866a-5a851c42f65e service nova] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Received event 
network-vif-plugged-3996353f-47fa-4159-a6b8-4f78c5718559 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1299.238707] env[62109]: DEBUG oslo_concurrency.lockutils [req-898be302-4b21-4a71-b870-91033398c51f req-1408d720-64af-43e6-866a-5a851c42f65e service nova] Acquiring lock "9ea97120-d2f5-43e9-a929-8f416f735268-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.239226] env[62109]: DEBUG oslo_concurrency.lockutils [req-898be302-4b21-4a71-b870-91033398c51f req-1408d720-64af-43e6-866a-5a851c42f65e service nova] Lock "9ea97120-d2f5-43e9-a929-8f416f735268-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.239460] env[62109]: DEBUG oslo_concurrency.lockutils [req-898be302-4b21-4a71-b870-91033398c51f req-1408d720-64af-43e6-866a-5a851c42f65e service nova] Lock "9ea97120-d2f5-43e9-a929-8f416f735268-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.240213] env[62109]: DEBUG nova.compute.manager [req-898be302-4b21-4a71-b870-91033398c51f req-1408d720-64af-43e6-866a-5a851c42f65e service nova] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] No waiting events found dispatching network-vif-plugged-3996353f-47fa-4159-a6b8-4f78c5718559 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1299.240213] env[62109]: WARNING nova.compute.manager [req-898be302-4b21-4a71-b870-91033398c51f req-1408d720-64af-43e6-866a-5a851c42f65e service nova] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Received unexpected event network-vif-plugged-3996353f-47fa-4159-a6b8-4f78c5718559 for instance with vm_state building and task_state spawning. 
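The lock traffic traced above ("Acquiring lock ... by ...", "acquired ... waited 0.000s", "released ... held 0.000s") is the standard oslo.concurrency pattern Nova wraps around the resource tracker and the per-instance event lists. A minimal sketch of that pattern, assuming only the public lockutils API; the guarded function bodies below are illustrative placeholders, not code from Nova:

# Sketch of the oslo.concurrency locking pattern behind the DEBUG lines above.
# Only lockutils.lock() / lockutils.synchronized() are real oslo.concurrency
# APIs; the function bodies are placeholders.
from oslo_concurrency import lockutils

def claim_resources(instance_uuid):
    # Context-manager form: emits acquire/release DEBUG messages of the
    # kind seen in this log while the caller is serialized on the lock.
    with lockutils.lock("compute_resources"):
        # ... perform the claim while holding the in-process lock ...
        return instance_uuid

@lockutils.synchronized("compute_resources")
def update_usage(instance_uuid):
    # Decorator form of the same named semaphore; its wrapper is what logs
    # the "waited N s" / "held N s" timings visible above.
    return instance_uuid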
[ 1299.339018] env[62109]: DEBUG nova.network.neutron [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Successfully updated port: 3996353f-47fa-4159-a6b8-4f78c5718559 {{(pid=62109) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1299.663247] env[62109]: DEBUG nova.scheduler.client.report [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1299.842133] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Acquiring lock "refresh_cache-9ea97120-d2f5-43e9-a929-8f416f735268" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1299.842307] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Acquired lock "refresh_cache-9ea97120-d2f5-43e9-a929-8f416f735268" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.842479] env[62109]: DEBUG nova.network.neutron [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1300.168443] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.146s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.197010] env[62109]: INFO nova.network.neutron [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updating port ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1300.377834] env[62109]: DEBUG nova.network.neutron [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1300.500575] env[62109]: DEBUG nova.network.neutron [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Updating instance_info_cache with network_info: [{"id": "3996353f-47fa-4159-a6b8-4f78c5718559", "address": "fa:16:3e:23:85:56", "network": {"id": "7176329b-d04c-41ed-8d11-49ac0929d5c4", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-57717765-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4692d9c2f59c45389eac0d2572856750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3996353f-47", "ovs_interfaceid": "3996353f-47fa-4159-a6b8-4f78c5718559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.003019] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Releasing lock "refresh_cache-9ea97120-d2f5-43e9-a929-8f416f735268" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1301.003430] env[62109]: DEBUG nova.compute.manager [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Instance network_info: |[{"id": "3996353f-47fa-4159-a6b8-4f78c5718559", "address": "fa:16:3e:23:85:56", "network": {"id": "7176329b-d04c-41ed-8d11-49ac0929d5c4", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-57717765-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4692d9c2f59c45389eac0d2572856750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3996353f-47", "ovs_interfaceid": "3996353f-47fa-4159-a6b8-4f78c5718559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1301.003887] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:85:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46e1fc20-2067-4e1a-9812-702772a2c82c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3996353f-47fa-4159-a6b8-4f78c5718559', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1301.011207] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Creating folder: Project (4692d9c2f59c45389eac0d2572856750). Parent ref: group-v244329. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1301.011480] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd89c409-e8a4-457d-b111-4feacc27ff8e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.023327] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Created folder: Project (4692d9c2f59c45389eac0d2572856750) in parent group-v244329. [ 1301.023384] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Creating folder: Instances. Parent ref: group-v244580. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1301.023545] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2c828c0-a0d2-4b82-9eea-c7d9c2e5a0d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.032255] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Created folder: Instances in parent group-v244580. [ 1301.032475] env[62109]: DEBUG oslo.service.loopingcall [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1301.032660] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1301.032849] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9be2bb40-b0b0-40a1-ae9b-1fe72b46a8fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.050481] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1301.050481] env[62109]: value = "task-1117429" [ 1301.050481] env[62109]: _type = "Task" [ 1301.050481] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.057478] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117429, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.268109] env[62109]: DEBUG nova.compute.manager [req-92e17f35-105e-4829-8b7d-dbabecd8f421 req-0e307bce-6c7d-42ec-8228-ce4bc8c9b792 service nova] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Received event network-changed-3996353f-47fa-4159-a6b8-4f78c5718559 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1301.268361] env[62109]: DEBUG nova.compute.manager [req-92e17f35-105e-4829-8b7d-dbabecd8f421 req-0e307bce-6c7d-42ec-8228-ce4bc8c9b792 service nova] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Refreshing instance network info cache due to event network-changed-3996353f-47fa-4159-a6b8-4f78c5718559. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1301.268594] env[62109]: DEBUG oslo_concurrency.lockutils [req-92e17f35-105e-4829-8b7d-dbabecd8f421 req-0e307bce-6c7d-42ec-8228-ce4bc8c9b792 service nova] Acquiring lock "refresh_cache-9ea97120-d2f5-43e9-a929-8f416f735268" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.268757] env[62109]: DEBUG oslo_concurrency.lockutils [req-92e17f35-105e-4829-8b7d-dbabecd8f421 req-0e307bce-6c7d-42ec-8228-ce4bc8c9b792 service nova] Acquired lock "refresh_cache-9ea97120-d2f5-43e9-a929-8f416f735268" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.268948] env[62109]: DEBUG nova.network.neutron [req-92e17f35-105e-4829-8b7d-dbabecd8f421 req-0e307bce-6c7d-42ec-8228-ce4bc8c9b792 service nova] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Refreshing network info cache for port 3996353f-47fa-4159-a6b8-4f78c5718559 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1301.560870] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117429, 'name': CreateVM_Task, 'duration_secs': 0.355292} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.560870] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1301.561371] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.561764] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.562058] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1301.562402] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33c93a6a-703f-4d9a-9e82-bec6204c961c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.567179] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Waiting for the task: (returnval){ [ 1301.567179] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52258e2d-24b1-996c-ca3d-83ebb6d0659d" [ 1301.567179] env[62109]: _type = "Task" [ 1301.567179] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.575324] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52258e2d-24b1-996c-ca3d-83ebb6d0659d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.721718] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.721883] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.722085] env[62109]: DEBUG nova.network.neutron [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1301.956199] env[62109]: DEBUG nova.network.neutron [req-92e17f35-105e-4829-8b7d-dbabecd8f421 req-0e307bce-6c7d-42ec-8228-ce4bc8c9b792 service nova] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Updated VIF entry in instance network info cache for port 3996353f-47fa-4159-a6b8-4f78c5718559. {{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1301.956586] env[62109]: DEBUG nova.network.neutron [req-92e17f35-105e-4829-8b7d-dbabecd8f421 req-0e307bce-6c7d-42ec-8228-ce4bc8c9b792 service nova] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Updating instance_info_cache with network_info: [{"id": "3996353f-47fa-4159-a6b8-4f78c5718559", "address": "fa:16:3e:23:85:56", "network": {"id": "7176329b-d04c-41ed-8d11-49ac0929d5c4", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-57717765-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4692d9c2f59c45389eac0d2572856750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3996353f-47", "ovs_interfaceid": "3996353f-47fa-4159-a6b8-4f78c5718559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1302.078996] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52258e2d-24b1-996c-ca3d-83ebb6d0659d, 'name': SearchDatastore_Task, 'duration_secs': 0.018261} completed 
successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.079260] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1302.079485] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Processing image 6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1302.079721] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1302.079871] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.080063] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1302.080319] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c66eb2c-a38e-4763-ba02-5ed800311360 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.087654] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1302.087831] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1302.088501] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69996bdc-08e7-47be-b715-e16f8665a1e3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.092952] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Waiting for the task: (returnval){ [ 1302.092952] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d7cee1-abf7-f89b-0ddf-5dad53b43326" [ 1302.092952] env[62109]: _type = "Task" [ 1302.092952] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.100134] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d7cee1-abf7-f89b-0ddf-5dad53b43326, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.422822] env[62109]: DEBUG nova.network.neutron [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updating instance_info_cache with network_info: [{"id": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "address": "fa:16:3e:cf:6e:94", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea50f1f4-69", "ovs_interfaceid": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1302.459104] env[62109]: DEBUG oslo_concurrency.lockutils [req-92e17f35-105e-4829-8b7d-dbabecd8f421 req-0e307bce-6c7d-42ec-8228-ce4bc8c9b792 service nova] Releasing lock "refresh_cache-9ea97120-d2f5-43e9-a929-8f416f735268" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1302.605448] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 
tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52d7cee1-abf7-f89b-0ddf-5dad53b43326, 'name': SearchDatastore_Task, 'duration_secs': 0.007747} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.606245] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcf9b7c6-52d4-41ad-a767-9102e1f90ae2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.612463] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Waiting for the task: (returnval){ [ 1302.612463] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5270cfb0-af8a-e89a-325a-0381fc6c9064" [ 1302.612463] env[62109]: _type = "Task" [ 1302.612463] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.619879] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5270cfb0-af8a-e89a-325a-0381fc6c9064, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.926214] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1302.951454] env[62109]: DEBUG nova.virt.hardware [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T07:50:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9343377c15f3fab5df18928cf63debef',container_format='bare',created_at=2024-10-03T08:04:22Z,direct_url=,disk_format='vmdk',id=c70fef60-128e-462e-822d-a04d0800cff1,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1202038140-shelved',owner='430353b9a427408494b462b49f11354a',properties=ImageMetaProps,protected=,size=31667200,status='active',tags=,updated_at=2024-10-03T08:04:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1302.951702] env[62109]: DEBUG nova.virt.hardware [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1302.951886] env[62109]: DEBUG nova.virt.hardware [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 
tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1302.952158] env[62109]: DEBUG nova.virt.hardware [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1302.952322] env[62109]: DEBUG nova.virt.hardware [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1302.952476] env[62109]: DEBUG nova.virt.hardware [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1302.952685] env[62109]: DEBUG nova.virt.hardware [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1302.952848] env[62109]: DEBUG nova.virt.hardware [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1302.953028] env[62109]: DEBUG nova.virt.hardware [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1302.953200] env[62109]: DEBUG nova.virt.hardware [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1302.953374] env[62109]: DEBUG nova.virt.hardware [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1302.954500] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64751fd-3501-4a00-9f2b-91f3bb82f966 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.962382] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c073bf9c-1522-48a7-94be-bab31c1c9afc {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.975238] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:6e:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea50f1f4-6955-463b-8cb2-d2e7ebbb6335', 'vif_model': 'vmxnet3'}] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1302.982352] env[62109]: DEBUG oslo.service.loopingcall [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1302.982575] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1302.982760] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e38210c-972b-4fd9-8289-69e1c08e66d4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.002237] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1303.002237] env[62109]: value = "task-1117430" [ 1303.002237] env[62109]: _type = "Task" [ 1303.002237] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.009281] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117430, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.123555] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]5270cfb0-af8a-e89a-325a-0381fc6c9064, 'name': SearchDatastore_Task, 'duration_secs': 0.00898} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.123775] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1303.124022] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 9ea97120-d2f5-43e9-a929-8f416f735268/9ea97120-d2f5-43e9-a929-8f416f735268.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1303.124292] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e77985ae-3617-42fc-8bd1-3ef293f22bdc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.130226] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Waiting for the task: (returnval){ [ 1303.130226] env[62109]: value = "task-1117431" [ 1303.130226] env[62109]: _type = "Task" [ 1303.130226] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.137517] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117431, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.297189] env[62109]: DEBUG nova.compute.manager [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Received event network-vif-plugged-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1303.297436] env[62109]: DEBUG oslo_concurrency.lockutils [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] Acquiring lock "cbc1367e-3d62-4e33-aaad-5112319c1326-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.297675] env[62109]: DEBUG oslo_concurrency.lockutils [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1303.297877] env[62109]: DEBUG oslo_concurrency.lockutils [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1303.297984] env[62109]: DEBUG nova.compute.manager [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] No waiting events found dispatching network-vif-plugged-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1303.298194] env[62109]: WARNING nova.compute.manager [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Received unexpected event network-vif-plugged-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 for instance with vm_state shelved_offloaded and task_state spawning. [ 1303.298380] env[62109]: DEBUG nova.compute.manager [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Received event network-changed-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1303.298557] env[62109]: DEBUG nova.compute.manager [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Refreshing instance network info cache due to event network-changed-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1303.298754] env[62109]: DEBUG oslo_concurrency.lockutils [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] Acquiring lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1303.298896] env[62109]: DEBUG oslo_concurrency.lockutils [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] Acquired lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.299091] env[62109]: DEBUG nova.network.neutron [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Refreshing network info cache for port ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1303.514824] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-1117430, 'name': CreateVM_Task, 'duration_secs': 0.325232} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.515236] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1303.515789] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c70fef60-128e-462e-822d-a04d0800cff1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1303.515997] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c70fef60-128e-462e-822d-a04d0800cff1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.516489] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c70fef60-128e-462e-822d-a04d0800cff1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1303.516795] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5cc5047-f233-4431-b831-ddf445941fbe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.522079] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1303.522079] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527bd1b5-644b-d627-c3f6-d96ee76a221e" [ 1303.522079] env[62109]: _type = "Task" [ 1303.522079] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.530069] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]527bd1b5-644b-d627-c3f6-d96ee76a221e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.640454] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117431, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.427755} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.640740] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8/6a6dfc4e-39e6-4022-ae8f-fec7056fb6e8.vmdk to [datastore2] 9ea97120-d2f5-43e9-a929-8f416f735268/9ea97120-d2f5-43e9-a929-8f416f735268.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1303.640963] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1303.641254] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64018d39-5ca1-4ae5-9580-4c8ed96e2bdc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.648225] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Waiting for the task: (returnval){ [ 1303.648225] env[62109]: value = "task-1117432" [ 1303.648225] env[62109]: _type = "Task" [ 1303.648225] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.656054] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117432, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.032349] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c70fef60-128e-462e-822d-a04d0800cff1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1304.032613] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Processing image c70fef60-128e-462e-822d-a04d0800cff1 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1304.032931] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c70fef60-128e-462e-822d-a04d0800cff1/c70fef60-128e-462e-822d-a04d0800cff1.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.033124] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c70fef60-128e-462e-822d-a04d0800cff1/c70fef60-128e-462e-822d-a04d0800cff1.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.033316] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1304.033566] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b9a0dbd-8e7c-4879-999d-ba4b079c88f9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.041470] env[62109]: DEBUG nova.network.neutron [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updated VIF entry in instance network info cache for port ea50f1f4-6955-463b-8cb2-d2e7ebbb6335. 
{{(pid=62109) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1304.041817] env[62109]: DEBUG nova.network.neutron [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updating instance_info_cache with network_info: [{"id": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "address": "fa:16:3e:cf:6e:94", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea50f1f4-69", "ovs_interfaceid": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.043848] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1304.044036] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1304.044921] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba927fb8-7bdb-4d0b-bfab-d8e1f5ab8d2c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.050056] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1304.050056] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f21564-e508-7674-96e8-8b7d4db423e2" [ 1304.050056] env[62109]: _type = "Task" [ 1304.050056] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.058624] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]52f21564-e508-7674-96e8-8b7d4db423e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.157940] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117432, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061871} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.158233] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1304.158967] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e650bda9-d319-4ac9-8a66-d3fb690f908e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.179684] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] 9ea97120-d2f5-43e9-a929-8f416f735268/9ea97120-d2f5-43e9-a929-8f416f735268.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1304.179926] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a71bc7e0-2e4e-4a26-bc50-694620125bf5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.200139] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Waiting for the task: (returnval){ [ 1304.200139] env[62109]: value = "task-1117433" [ 1304.200139] env[62109]: _type = "Task" [ 1304.200139] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.207447] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117433, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.546252] env[62109]: DEBUG oslo_concurrency.lockutils [req-1d0f1d8b-c132-4ec4-acd9-555fc6d0a8ea req-2b37a82f-ad80-4769-adb8-24ec0dd18752 service nova] Releasing lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1304.560706] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Preparing fetch location {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1304.560951] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Fetch image to [datastore2] OSTACK_IMG_7c997de5-d0eb-4e37-895f-19944e5f1170/OSTACK_IMG_7c997de5-d0eb-4e37-895f-19944e5f1170.vmdk {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1304.561156] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Downloading stream optimized image c70fef60-128e-462e-822d-a04d0800cff1 to [datastore2] OSTACK_IMG_7c997de5-d0eb-4e37-895f-19944e5f1170/OSTACK_IMG_7c997de5-d0eb-4e37-895f-19944e5f1170.vmdk on the data store datastore2 as vApp {{(pid=62109) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1304.561336] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Downloading image file data c70fef60-128e-462e-822d-a04d0800cff1 to the ESX as VM named 'OSTACK_IMG_7c997de5-d0eb-4e37-895f-19944e5f1170' {{(pid=62109) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1304.624832] env[62109]: DEBUG oslo_vmware.rw_handles [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1304.624832] env[62109]: value = "resgroup-9" [ 1304.624832] env[62109]: _type = "ResourcePool" [ 1304.624832] env[62109]: }. 
{{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1304.625176] env[62109]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-aeb95dbd-3728-4d60-8f88-9877752451fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.647442] env[62109]: DEBUG oslo_vmware.rw_handles [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lease: (returnval){ [ 1304.647442] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523d8840-4bb6-b4e7-ca8e-36a219ee36e0" [ 1304.647442] env[62109]: _type = "HttpNfcLease" [ 1304.647442] env[62109]: } obtained for vApp import into resource pool (val){ [ 1304.647442] env[62109]: value = "resgroup-9" [ 1304.647442] env[62109]: _type = "ResourcePool" [ 1304.647442] env[62109]: }. {{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1304.647810] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the lease: (returnval){ [ 1304.647810] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523d8840-4bb6-b4e7-ca8e-36a219ee36e0" [ 1304.647810] env[62109]: _type = "HttpNfcLease" [ 1304.647810] env[62109]: } to be ready. {{(pid=62109) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1304.653938] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1304.653938] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523d8840-4bb6-b4e7-ca8e-36a219ee36e0" [ 1304.653938] env[62109]: _type = "HttpNfcLease" [ 1304.653938] env[62109]: } is initializing. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1304.709882] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117433, 'name': ReconfigVM_Task, 'duration_secs': 0.262662} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.710202] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Reconfigured VM instance instance-00000075 to attach disk [datastore2] 9ea97120-d2f5-43e9-a929-8f416f735268/9ea97120-d2f5-43e9-a929-8f416f735268.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1304.710912] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dca83047-fca8-463d-b4b8-f049f51336fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.717409] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Waiting for the task: (returnval){ [ 1304.717409] env[62109]: value = "task-1117435" [ 1304.717409] env[62109]: _type = "Task" [ 1304.717409] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.727059] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117435, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.156135] env[62109]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1305.156135] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523d8840-4bb6-b4e7-ca8e-36a219ee36e0" [ 1305.156135] env[62109]: _type = "HttpNfcLease" [ 1305.156135] env[62109]: } is ready. {{(pid=62109) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1305.156426] env[62109]: DEBUG oslo_vmware.rw_handles [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1305.156426] env[62109]: value = "session[52dd7133-ab40-7dd4-a628-2652b6f3a5df]523d8840-4bb6-b4e7-ca8e-36a219ee36e0" [ 1305.156426] env[62109]: _type = "HttpNfcLease" [ 1305.156426] env[62109]: }. {{(pid=62109) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1305.157128] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31de192d-6e32-4e0d-a3b1-6a6e5042934f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.164079] env[62109]: DEBUG oslo_vmware.rw_handles [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a58b5f-098e-a6a0-83ec-965c629fb09f/disk-0.vmdk from lease info. 
{{(pid=62109) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1305.164261] env[62109]: DEBUG oslo_vmware.rw_handles [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating HTTP connection to write to file with size = 31667200 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a58b5f-098e-a6a0-83ec-965c629fb09f/disk-0.vmdk. {{(pid=62109) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1305.229017] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6766fce9-89e1-4ac3-b825-5b767184ad17 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.230583] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117435, 'name': Rename_Task, 'duration_secs': 0.143815} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.232064] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1305.232608] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d615c5c-6fca-4a67-867a-c3f035c80926 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.239078] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Waiting for the task: (returnval){ [ 1305.239078] env[62109]: value = "task-1117436" [ 1305.239078] env[62109]: _type = "Task" [ 1305.239078] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.246231] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117436, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.750873] env[62109]: DEBUG oslo_vmware.api [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117436, 'name': PowerOnVM_Task, 'duration_secs': 0.45353} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.752567] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1305.752791] env[62109]: INFO nova.compute.manager [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Took 6.71 seconds to spawn the instance on the hypervisor. [ 1305.752990] env[62109]: DEBUG nova.compute.manager [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1305.753874] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1042d1-8a2d-44bb-bb6a-9fc4e78477e5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.275339] env[62109]: INFO nova.compute.manager [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Took 13.03 seconds to build instance. [ 1306.576095] env[62109]: DEBUG oslo_vmware.rw_handles [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Completed reading data from the image iterator. {{(pid=62109) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1306.576403] env[62109]: DEBUG oslo_vmware.rw_handles [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a58b5f-098e-a6a0-83ec-965c629fb09f/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1306.577358] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c214774-0a26-441e-a3f8-585cb9828d1e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.584971] env[62109]: DEBUG oslo_vmware.rw_handles [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a58b5f-098e-a6a0-83ec-965c629fb09f/disk-0.vmdk is in state: ready. 
{{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1306.585240] env[62109]: DEBUG oslo_vmware.rw_handles [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a58b5f-098e-a6a0-83ec-965c629fb09f/disk-0.vmdk. {{(pid=62109) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1306.585484] env[62109]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-9cc284a1-2d29-424e-8025-e9a7de2a4bb8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.777667] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48465b99-44ab-49a0-bd75-c5ef4a6aae8c tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Lock "9ea97120-d2f5-43e9-a929-8f416f735268" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.547s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.816355] env[62109]: DEBUG oslo_vmware.rw_handles [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a58b5f-098e-a6a0-83ec-965c629fb09f/disk-0.vmdk. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1306.816556] env[62109]: INFO nova.virt.vmwareapi.images [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Downloaded image file data c70fef60-128e-462e-822d-a04d0800cff1 [ 1306.817436] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1547d81-4ed2-473d-ae28-0ee166d12edd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.833510] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c58de3b-1e2b-404f-896b-da8eaeb3e23d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.850795] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Acquiring lock "9ea97120-d2f5-43e9-a929-8f416f735268" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1306.851066] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Lock "9ea97120-d2f5-43e9-a929-8f416f735268" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1306.851269] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Acquiring lock "9ea97120-d2f5-43e9-a929-8f416f735268-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1306.851459] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Lock "9ea97120-d2f5-43e9-a929-8f416f735268-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1306.851635] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Lock "9ea97120-d2f5-43e9-a929-8f416f735268-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.853951] env[62109]: INFO nova.compute.manager [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Terminating instance [ 1306.856348] env[62109]: INFO nova.virt.vmwareapi.images [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] The imported VM was unregistered [ 1306.858740] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Caching image {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1306.858979] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Creating directory with path [datastore2] devstack-image-cache_base/c70fef60-128e-462e-822d-a04d0800cff1 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1306.859576] env[62109]: DEBUG nova.compute.manager [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1306.859766] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1306.860039] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff57736a-e1aa-4a95-81da-0d92d5f78f21 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.862407] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472d35c1-bc7c-400d-9938-de39d9680051 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.869114] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1306.869328] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-726bcc73-484b-4cbf-915e-2fa4c118428d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.874930] env[62109]: DEBUG oslo_vmware.api [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Waiting for the task: (returnval){ [ 1306.874930] env[62109]: value = "task-1117438" [ 1306.874930] env[62109]: _type = "Task" [ 1306.874930] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.882319] env[62109]: DEBUG oslo_vmware.api [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117438, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.883326] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Created directory with path [datastore2] devstack-image-cache_base/c70fef60-128e-462e-822d-a04d0800cff1 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1306.883512] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_7c997de5-d0eb-4e37-895f-19944e5f1170/OSTACK_IMG_7c997de5-d0eb-4e37-895f-19944e5f1170.vmdk to [datastore2] devstack-image-cache_base/c70fef60-128e-462e-822d-a04d0800cff1/c70fef60-128e-462e-822d-a04d0800cff1.vmdk. 
{{(pid=62109) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1306.883737] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-2ecc7e59-8dc7-417b-8eff-400ce11ed923 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.889750] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1306.889750] env[62109]: value = "task-1117439" [ 1306.889750] env[62109]: _type = "Task" [ 1306.889750] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.898333] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117439, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.386349] env[62109]: DEBUG oslo_vmware.api [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117438, 'name': PowerOffVM_Task, 'duration_secs': 0.364563} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.386772] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1307.387017] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1307.387335] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16cfee9a-b7a3-4a13-82dc-72cf397709e7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.404116] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117439, 'name': MoveVirtualDisk_Task} progress is 12%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.481044] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1307.481396] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1307.481620] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Deleting the datastore file [datastore2] 9ea97120-d2f5-43e9-a929-8f416f735268 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1307.481953] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0850b27-01c9-44d3-8900-e07c2f6fe04f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.488580] env[62109]: DEBUG oslo_vmware.api [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Waiting for the task: (returnval){ [ 1307.488580] env[62109]: value = "task-1117441" [ 1307.488580] env[62109]: _type = "Task" [ 1307.488580] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.497877] env[62109]: DEBUG oslo_vmware.api [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117441, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.900293] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117439, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.999616] env[62109]: DEBUG oslo_vmware.api [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117441, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.411144] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117439, 'name': MoveVirtualDisk_Task} progress is 57%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.499583] env[62109]: DEBUG oslo_vmware.api [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117441, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.902384] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117439, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.000858] env[62109]: DEBUG oslo_vmware.api [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117441, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.403081] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117439, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.4611} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.403379] env[62109]: INFO nova.virt.vmwareapi.ds_util [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_7c997de5-d0eb-4e37-895f-19944e5f1170/OSTACK_IMG_7c997de5-d0eb-4e37-895f-19944e5f1170.vmdk to [datastore2] devstack-image-cache_base/c70fef60-128e-462e-822d-a04d0800cff1/c70fef60-128e-462e-822d-a04d0800cff1.vmdk. 
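Editor's note: the long-running vCenter operations in this trace (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, MoveVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task, DeleteDatastoreFile_Task) all follow the same poll-until-terminal pattern: repeated "progress is N%" DEBUG lines followed by a "completed successfully" record carrying a 'duration_secs' value. The sketch below is a minimal, illustrative Python rendition of that pattern only; it is not the oslo.vmware implementation, and the poll_task_info() callable and the TaskInfo fields used here are assumptions introduced purely for the example (in Nova the real polling is done through oslo_vmware.api).

import time

# Illustrative stand-in for a vSphere task-info result; the real object comes
# back from the vCenter SOAP API via oslo.vmware. Fields mirror what the log
# records show ("progress is N%", terminal success/error).
class TaskInfo:
    def __init__(self, state, progress=0, error=None):
        self.state = state        # 'running', 'success', or 'error'
        self.progress = progress  # integer percentage, as in "progress is 57%"
        self.error = error

def wait_for_task(poll_task_info, task_id, interval=0.5):
    """Poll a vSphere task until it reaches a terminal state.

    poll_task_info: hypothetical callable returning a TaskInfo for task_id.
    Returns the elapsed time, analogous to the 'duration_secs' value logged
    when a task completes successfully.
    """
    started = time.monotonic()
    while True:
        info = poll_task_info(task_id)
        if info.state == 'success':
            # Corresponds to the "... completed successfully" records above.
            return time.monotonic() - started
        if info.state == 'error':
            raise RuntimeError('Task %s failed: %s' % (task_id, info.error))
        # Corresponds to the repeated "progress is N%" DEBUG records above.
        print('Task %s progress is %d%%' % (task_id, info.progress))
        time.sleep(interval)

The same loop shape recurs for every task in the trace; only the task name, the progress checkpoints, and the final duration differ from one operation to the next.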
[ 1309.403575] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Cleaning up location [datastore2] OSTACK_IMG_7c997de5-d0eb-4e37-895f-19944e5f1170 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1309.403746] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_7c997de5-d0eb-4e37-895f-19944e5f1170 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1309.403998] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-861582bf-9ede-45cf-a053-43b4dbaaef49 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.410222] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1309.410222] env[62109]: value = "task-1117442" [ 1309.410222] env[62109]: _type = "Task" [ 1309.410222] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.417611] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117442, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.500215] env[62109]: DEBUG oslo_vmware.api [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Task: {'id': task-1117441, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.62362} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.500446] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1309.500636] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1309.500818] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1309.500998] env[62109]: INFO nova.compute.manager [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Took 2.64 seconds to destroy the instance on the hypervisor. [ 1309.501262] env[62109]: DEBUG oslo.service.loopingcall [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1309.501452] env[62109]: DEBUG nova.compute.manager [-] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1309.501546] env[62109]: DEBUG nova.network.neutron [-] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1309.829712] env[62109]: DEBUG nova.compute.manager [req-71f6948f-4d09-4fda-9512-30cb21d802ae req-2946418a-c9c8-4fc2-b349-8114f421aaaa service nova] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Received event network-vif-deleted-3996353f-47fa-4159-a6b8-4f78c5718559 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1309.829929] env[62109]: INFO nova.compute.manager [req-71f6948f-4d09-4fda-9512-30cb21d802ae req-2946418a-c9c8-4fc2-b349-8114f421aaaa service nova] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Neutron deleted interface 3996353f-47fa-4159-a6b8-4f78c5718559; detaching it from the instance and deleting it from the info cache [ 1309.830132] env[62109]: DEBUG nova.network.neutron [req-71f6948f-4d09-4fda-9512-30cb21d802ae req-2946418a-c9c8-4fc2-b349-8114f421aaaa service nova] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.920296] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117442, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033083} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.920727] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1309.920727] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c70fef60-128e-462e-822d-a04d0800cff1/c70fef60-128e-462e-822d-a04d0800cff1.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.920931] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c70fef60-128e-462e-822d-a04d0800cff1/c70fef60-128e-462e-822d-a04d0800cff1.vmdk to [datastore2] cbc1367e-3d62-4e33-aaad-5112319c1326/cbc1367e-3d62-4e33-aaad-5112319c1326.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1309.921196] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c7e9b870-d9f7-4859-b6ed-e4fa385daa4d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.927728] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1309.927728] env[62109]: value = "task-1117443" [ 1309.927728] env[62109]: _type = "Task" [ 1309.927728] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.934974] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117443, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.308557] env[62109]: DEBUG nova.network.neutron [-] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.333085] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-865c9e99-d502-4871-a764-8914bc91e40a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.345322] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d00d6c-00db-44bf-859a-bf96342068fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.370883] env[62109]: DEBUG nova.compute.manager [req-71f6948f-4d09-4fda-9512-30cb21d802ae req-2946418a-c9c8-4fc2-b349-8114f421aaaa service nova] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Detach interface failed, port_id=3996353f-47fa-4159-a6b8-4f78c5718559, reason: Instance 9ea97120-d2f5-43e9-a929-8f416f735268 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1310.438614] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117443, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.812050] env[62109]: INFO nova.compute.manager [-] [instance: 9ea97120-d2f5-43e9-a929-8f416f735268] Took 1.31 seconds to deallocate network for instance. [ 1310.939285] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117443, 'name': CopyVirtualDisk_Task} progress is 46%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.319855] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.320149] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.320387] env[62109]: DEBUG nova.objects.instance [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Lazy-loading 'resources' on Instance uuid 9ea97120-d2f5-43e9-a929-8f416f735268 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1311.439782] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117443, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.891294] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71ddf4a-9eee-4bf9-bac8-06cfb6b79045 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.901498] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b829bbf2-57ac-46be-aaca-092aded7e6f4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.954543] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41651962-64c1-405d-a974-83dbf91cca44 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.973729] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117443, 'name': CopyVirtualDisk_Task} progress is 94%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.980200] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0504cd2a-f727-46c2-a0a7-0ff2b6657f10 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.995107] env[62109]: DEBUG nova.compute.provider_tree [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1312.456928] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117443, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.166406} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.457221] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c70fef60-128e-462e-822d-a04d0800cff1/c70fef60-128e-462e-822d-a04d0800cff1.vmdk to [datastore2] cbc1367e-3d62-4e33-aaad-5112319c1326/cbc1367e-3d62-4e33-aaad-5112319c1326.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1312.457961] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a654f1a-af26-4b18-af4b-02f3479461b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.478821] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] cbc1367e-3d62-4e33-aaad-5112319c1326/cbc1367e-3d62-4e33-aaad-5112319c1326.vmdk or device None with type streamOptimized {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1312.479052] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31d37a73-fd18-4e4a-a7c3-5efb529a1056 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.497742] env[62109]: DEBUG nova.scheduler.client.report [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1312.500669] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1312.500669] env[62109]: value = "task-1117444" [ 1312.500669] env[62109]: _type = "Task" [ 1312.500669] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.508406] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117444, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.005966] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.686s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.014349] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117444, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.025974] env[62109]: INFO nova.scheduler.client.report [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Deleted allocations for instance 9ea97120-d2f5-43e9-a929-8f416f735268 [ 1313.511180] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117444, 'name': ReconfigVM_Task, 'duration_secs': 0.523678} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.511407] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Reconfigured VM instance instance-00000073 to attach disk [datastore2] cbc1367e-3d62-4e33-aaad-5112319c1326/cbc1367e-3d62-4e33-aaad-5112319c1326.vmdk or device None with type streamOptimized {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1313.512074] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7447bd4e-d2cd-49fe-9271-7009d02f4803 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.518429] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1313.518429] env[62109]: value = "task-1117445" [ 1313.518429] env[62109]: _type = "Task" [ 1313.518429] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.526190] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117445, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.533039] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9f9774f7-1d53-4373-8e35-7e50f42c6156 tempest-ServersNegativeTestMultiTenantJSON-2043625977 tempest-ServersNegativeTestMultiTenantJSON-2043625977-project-member] Lock "9ea97120-d2f5-43e9-a929-8f416f735268" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.682s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1314.029135] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117445, 'name': Rename_Task, 'duration_secs': 0.126228} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.029525] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1314.029738] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a813c9c-835a-4f60-a5d1-441fb39fe61b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.035720] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1314.035720] env[62109]: value = "task-1117446" [ 1314.035720] env[62109]: _type = "Task" [ 1314.035720] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.043987] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117446, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.546706] env[62109]: DEBUG oslo_vmware.api [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117446, 'name': PowerOnVM_Task, 'duration_secs': 0.402999} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.546940] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1314.669616] env[62109]: DEBUG nova.compute.manager [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1314.670565] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce06854-52a0-4ae4-afe1-fbcd58dea746 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.186397] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1605dde7-0fc6-4c9b-aa99-7e21fdd1d193 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.789s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.646411] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1342.646055] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1343.646547] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1344.646625] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1344.647058] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1345.682381] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.682630] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquired lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.682674] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Forcefully refreshing network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1346.896510] env[62109]: DEBUG nova.network.neutron [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updating instance_info_cache with network_info: [{"id": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "address": "fa:16:3e:cf:6e:94", "network": {"id": "26b36aea-1cdb-4b64-8715-107ab1f105e5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1687256279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "430353b9a427408494b462b49f11354a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea50f1f4-69", "ovs_interfaceid": "ea50f1f4-6955-463b-8cb2-d2e7ebbb6335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.398867] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Releasing lock "refresh_cache-cbc1367e-3d62-4e33-aaad-5112319c1326" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.399114] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updated the network info_cache for instance {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1347.399342] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.399518] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.399664] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.399793] env[62109]: DEBUG nova.compute.manager [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1347.646347] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.646526] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1348.151189] env[62109]: DEBUG oslo_service.periodic_task [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1348.654131] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.654386] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.654568] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.654749] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1348.655677] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5567a730-bde2-4145-9e86-3371f762af8c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.664947] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11436ab-406b-40be-891c-ce1046d59f66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.678510] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee959feb-144e-4415-a49e-fb7087009cd4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.684518] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fc9e77-e4a7-40a4-83b4-9766b63121be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.712434] env[62109]: DEBUG 
nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181358MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1348.712576] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.712758] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.736544] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance dbc9caa9-3b43-4ce9-b91d-6f259b0f81e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1349.736785] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Instance cbc1367e-3d62-4e33-aaad-5112319c1326 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1349.736932] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1349.737087] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1349.753110] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Refreshing inventories for resource provider 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1349.766602] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating ProviderTree inventory for provider 574e9717-c25e-453d-8028-45d9e2f95398 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1349.766783] env[62109]: DEBUG nova.compute.provider_tree 
[None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Updating inventory in ProviderTree for provider 574e9717-c25e-453d-8028-45d9e2f95398 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1349.777057] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Refreshing aggregate associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1349.792669] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Refreshing trait associations for resource provider 574e9717-c25e-453d-8028-45d9e2f95398, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1349.822998] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0b27ba-cfb6-4b94-af66-55fc55fa6f45 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.830403] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a90ee2-bdea-47fb-a5ad-858d398eb51e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.862663] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50379f0-8f26-45c0-b1c3-80f6b54653f8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.869678] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52eeb347-a543-4956-8b2d-39b36a6fddb4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.882346] env[62109]: DEBUG nova.compute.provider_tree [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1350.385384] env[62109]: DEBUG nova.scheduler.client.report [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1350.890501] env[62109]: DEBUG nova.compute.resource_tracker [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1350.890876] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e2219196-fee3-4651-be35-cb42e09e35a4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.178s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.279118] env[62109]: DEBUG oslo_concurrency.lockutils [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "cbc1367e-3d62-4e33-aaad-5112319c1326" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.279485] env[62109]: DEBUG oslo_concurrency.lockutils [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.279623] env[62109]: DEBUG oslo_concurrency.lockutils [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "cbc1367e-3d62-4e33-aaad-5112319c1326-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.279813] env[62109]: DEBUG oslo_concurrency.lockutils [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.279997] env[62109]: DEBUG oslo_concurrency.lockutils [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.283590] env[62109]: INFO nova.compute.manager [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Terminating instance [ 1352.285438] env[62109]: DEBUG nova.compute.manager [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1352.285663] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1352.286518] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b6c097-393f-4c0f-98b6-aa6d3b860126 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.294868] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1352.295112] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cdd7337f-bd87-4d58-a93d-f044eaae2366 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.301305] env[62109]: DEBUG oslo_vmware.api [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1352.301305] env[62109]: value = "task-1117447" [ 1352.301305] env[62109]: _type = "Task" [ 1352.301305] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.310026] env[62109]: DEBUG oslo_vmware.api [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117447, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.811237] env[62109]: DEBUG oslo_vmware.api [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117447, 'name': PowerOffVM_Task, 'duration_secs': 0.199194} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.811513] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1352.811689] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1352.811933] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6fee36ae-00ed-4d08-b1f1-82994bd8d217 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.916593] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1352.916852] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1352.916995] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleting the datastore file [datastore2] cbc1367e-3d62-4e33-aaad-5112319c1326 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1352.917277] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a31420ad-fa31-420b-b472-a32f94ee5ba2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.924667] env[62109]: DEBUG oslo_vmware.api [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for the task: (returnval){ [ 1352.924667] env[62109]: value = "task-1117449" [ 1352.924667] env[62109]: _type = "Task" [ 1352.924667] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.932038] env[62109]: DEBUG oslo_vmware.api [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117449, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.436181] env[62109]: DEBUG oslo_vmware.api [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Task: {'id': task-1117449, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127935} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.436551] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1353.436699] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1353.436784] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1353.436966] env[62109]: INFO nova.compute.manager [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1353.437229] env[62109]: DEBUG oslo.service.loopingcall [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1353.437420] env[62109]: DEBUG nova.compute.manager [-] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1353.437518] env[62109]: DEBUG nova.network.neutron [-] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1353.832186] env[62109]: DEBUG nova.compute.manager [req-155f367e-7f4a-4f76-a089-839afcc51314 req-0a300456-2b6b-4a19-8303-07b3e25a486c service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Received event network-vif-deleted-ea50f1f4-6955-463b-8cb2-d2e7ebbb6335 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1353.832357] env[62109]: INFO nova.compute.manager [req-155f367e-7f4a-4f76-a089-839afcc51314 req-0a300456-2b6b-4a19-8303-07b3e25a486c service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Neutron deleted interface ea50f1f4-6955-463b-8cb2-d2e7ebbb6335; detaching it from the instance and deleting it from the info cache [ 1353.832582] env[62109]: DEBUG nova.network.neutron [req-155f367e-7f4a-4f76-a089-839afcc51314 req-0a300456-2b6b-4a19-8303-07b3e25a486c service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.313054] env[62109]: DEBUG nova.network.neutron [-] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.335620] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d81fab2c-5d27-48d5-a3b2-5ae577174645 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.347233] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a39ebab1-12b1-4a1d-8d81-4e3a8947b862 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.370943] env[62109]: DEBUG nova.compute.manager [req-155f367e-7f4a-4f76-a089-839afcc51314 req-0a300456-2b6b-4a19-8303-07b3e25a486c service nova] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Detach interface failed, port_id=ea50f1f4-6955-463b-8cb2-d2e7ebbb6335, reason: Instance cbc1367e-3d62-4e33-aaad-5112319c1326 could not be found. {{(pid=62109) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1354.815303] env[62109]: INFO nova.compute.manager [-] [instance: cbc1367e-3d62-4e33-aaad-5112319c1326] Took 1.38 seconds to deallocate network for instance. 
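The teardown entries above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, then network deallocation) follow the same pattern that recurs throughout this log: submit a vCenter task, then poll it until it reports completion, logging "progress is N%" along the way. The sketch below is only a minimal, self-contained illustration of that poll-until-done loop, not the oslo.vmware implementation; get_task_info is a hypothetical stand-in for reading the vCenter TaskInfo object.

import time
from typing import Any, Callable, Dict


class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""


def wait_for_task(get_task_info: Callable[[], Dict[str, Any]],
                  poll_interval: float = 0.5,
                  timeout: float = 300.0) -> Dict[str, Any]:
    """Poll a task until it finishes, mirroring the 'progress is N%' lines above.

    get_task_info is a hypothetical callable returning a dict such as
    {'state': 'running', 'progress': 69} or {'state': 'success'}; the real
    driver reads this from the vCenter TaskInfo object instead.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info()
        state = info.get("state")
        if state == "success":
            return info  # e.g. the task "completed successfully" entries above
        if state == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        # Report progress the way the compute log does, then sleep and retry.
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Simulated task that finishes after three polls.
    states = iter([{"state": "running", "progress": 69},
                   {"state": "running", "progress": 94},
                   {"state": "success"}])
    wait_for_task(lambda: next(states), poll_interval=0.01)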
[ 1355.321849] env[62109]: DEBUG oslo_concurrency.lockutils [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.322144] env[62109]: DEBUG oslo_concurrency.lockutils [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.322408] env[62109]: DEBUG nova.objects.instance [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lazy-loading 'resources' on Instance uuid cbc1367e-3d62-4e33-aaad-5112319c1326 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1355.865811] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bbf68d-2d09-4019-ab64-0caf4ac60657 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.873528] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a272b392-74fe-4e31-839d-dd914393b900 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.904515] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749d3ebe-2529-4332-9196-416cf51364aa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.911684] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a741348-30aa-4a4e-8447-58bd9671c5b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.924546] env[62109]: DEBUG nova.compute.provider_tree [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed in ProviderTree for provider: 574e9717-c25e-453d-8028-45d9e2f95398 {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1356.427581] env[62109]: DEBUG nova.scheduler.client.report [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Inventory has not changed for provider 574e9717-c25e-453d-8028-45d9e2f95398 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1356.932612] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.610s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.951729] env[62109]: INFO nova.scheduler.client.report [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Deleted allocations for instance cbc1367e-3d62-4e33-aaad-5112319c1326 [ 1357.458926] env[62109]: DEBUG oslo_concurrency.lockutils [None req-133e68a7-63d3-44fb-bf4b-b72105475678 tempest-AttachVolumeShelveTestJSON-383840318 tempest-AttachVolumeShelveTestJSON-383840318-project-member] Lock "cbc1367e-3d62-4e33-aaad-5112319c1326" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.179s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
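For reference, the inventory data repeated in the entries above converts to schedulable capacity with the usual placement formula, capacity = (total - reserved) * allocation_ratio, and the "Final resource view" figures (used_vcpus=2, used_ram=896MB, used_disk=2GB) line up with the 512MB reserved memory plus the two instance allocations of {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1} each. The sketch below only reproduces that arithmetic from the values in the log; the helper names are illustrative and are not Nova's.

# A minimal sketch of the arithmetic behind the resource-tracker entries above,
# assuming the usual placement convention capacity = (total - reserved) * allocation_ratio.
# Helper names are illustrative, not Nova's.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

# The two instances the tracker reports as actively managed on this host.
instance_allocations = [
    {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # dbc9caa9-...
    {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # cbc1367e-...
]


def capacity(inv: dict) -> dict:
    """Effective schedulable capacity per resource class."""
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}


def used(allocations: list, inv: dict) -> dict:
    """Host usage as the tracker reports it: reserved plus per-instance usage."""
    totals = {rc: inv[rc]['reserved'] for rc in inv}
    for alloc in allocations:
        for rc, amount in alloc.items():
            totals[rc] += amount
    return totals


print(capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
print(used(instance_allocations, inventory))
# {'VCPU': 2, 'MEMORY_MB': 896, 'DISK_GB': 2} -- matching used_vcpus=2,
# used_ram=896MB and used_disk=2GB in the "Final resource view" entry.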